debug-delta-find: add a --source option...
marmoute
r50484:266bb5c8 default
@@ -1,5058 +1,5091 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mdiff,
62 mergestate as mergestatemod,
63 mergestate as mergestatemod,
63 metadata,
64 metadata,
64 obsolete,
65 obsolete,
65 obsutil,
66 obsutil,
66 pathutil,
67 pathutil,
67 phases,
68 phases,
68 policy,
69 policy,
69 pvec,
70 pvec,
70 pycompat,
71 pycompat,
71 registrar,
72 registrar,
72 repair,
73 repair,
73 repoview,
74 repoview,
74 requirements,
75 requirements,
75 revlog,
76 revlog,
76 revlogutils,
77 revlogutils,
77 revset,
78 revset,
78 revsetlang,
79 revsetlang,
79 scmutil,
80 scmutil,
80 setdiscovery,
81 setdiscovery,
81 simplemerge,
82 simplemerge,
82 sshpeer,
83 sshpeer,
83 sslutil,
84 sslutil,
84 streamclone,
85 streamclone,
85 strip,
86 strip,
86 tags as tagsmod,
87 tags as tagsmod,
87 templater,
88 templater,
88 treediscovery,
89 treediscovery,
89 upgrade,
90 upgrade,
90 url as urlmod,
91 url as urlmod,
91 util,
92 util,
92 vfs as vfsmod,
93 vfs as vfsmod,
93 wireprotoframing,
94 wireprotoframing,
94 wireprotoserver,
95 wireprotoserver,
95 )
96 )
96 from .interfaces import repository
97 from .interfaces import repository
97 from .utils import (
98 from .utils import (
98 cborutil,
99 cborutil,
99 compression,
100 compression,
100 dateutil,
101 dateutil,
101 procutil,
102 procutil,
102 stringutil,
103 stringutil,
103 urlutil,
104 urlutil,
104 )
105 )
105
106
106 from .revlogutils import (
107 from .revlogutils import (
107 constants as revlog_constants,
108 constants as revlog_constants,
108 debug as revlog_debug,
109 debug as revlog_debug,
109 deltas as deltautil,
110 deltas as deltautil,
110 nodemap,
111 nodemap,
111 rewrite,
112 rewrite,
112 sidedata,
113 sidedata,
113 )
114 )
114
115
115 release = lockmod.release
116 release = lockmod.release
116
117
117 table = {}
118 table = {}
118 table.update(strip.command._table)
119 table.update(strip.command._table)
119 command = registrar.command(table)
120 command = registrar.command(table)
120
121
121
122
122 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 def debugancestor(ui, repo, *args):
124 def debugancestor(ui, repo, *args):
124 """find the ancestor revision of two revisions in a given index"""
125 """find the ancestor revision of two revisions in a given index"""
125 if len(args) == 3:
126 if len(args) == 3:
126 index, rev1, rev2 = args
127 index, rev1, rev2 = args
127 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 lookup = r.lookup
129 lookup = r.lookup
129 elif len(args) == 2:
130 elif len(args) == 2:
130 if not repo:
131 if not repo:
131 raise error.Abort(
132 raise error.Abort(
132 _(b'there is no Mercurial repository here (.hg not found)')
133 _(b'there is no Mercurial repository here (.hg not found)')
133 )
134 )
134 rev1, rev2 = args
135 rev1, rev2 = args
135 r = repo.changelog
136 r = repo.changelog
136 lookup = repo.lookup
137 lookup = repo.lookup
137 else:
138 else:
138 raise error.Abort(_(b'either two or three arguments required'))
139 raise error.Abort(_(b'either two or three arguments required'))
139 a = r.ancestor(lookup(rev1), lookup(rev2))
140 a = r.ancestor(lookup(rev1), lookup(rev2))
140 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141
142
142
143
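A brief usage sketch of the two calling forms accepted by debugancestor above (the revision numbers and the index path are illustrative placeholders, not taken from this changeset):

    $ hg debugancestor 10 12                           # use the current repository's changelog
    $ hg debugancestor .hg/store/00changelog.i 10 12   # use an explicit revlog index file

In both forms the common ancestor is printed as "rev:hex", matching the ui.write() call above.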
143 @command(b'debugantivirusrunning', [])
144 @command(b'debugantivirusrunning', [])
144 def debugantivirusrunning(ui, repo):
145 def debugantivirusrunning(ui, repo):
145 """attempt to trigger an antivirus scanner to see if one is active"""
146 """attempt to trigger an antivirus scanner to see if one is active"""
146 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
147 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
147 f.write(
148 f.write(
148 util.b85decode(
149 util.b85decode(
149 # This is a base85-armored version of the EICAR test file. See
150 # This is a base85-armored version of the EICAR test file. See
150 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 )
154 )
154 )
155 )
155 # Give an AV engine time to scan the file.
156 # Give an AV engine time to scan the file.
156 time.sleep(2)
157 time.sleep(2)
157 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158
159
159
160
160 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 def debugapplystreamclonebundle(ui, repo, fname):
162 def debugapplystreamclonebundle(ui, repo, fname):
162 """apply a stream clone bundle file"""
163 """apply a stream clone bundle file"""
163 f = hg.openpath(ui, fname)
164 f = hg.openpath(ui, fname)
164 gen = exchange.readbundle(ui, f, fname)
165 gen = exchange.readbundle(ui, f, fname)
165 gen.apply(repo)
166 gen.apply(repo)
166
167
167
168
168 @command(
169 @command(
169 b'debugbuilddag',
170 b'debugbuilddag',
170 [
171 [
171 (
172 (
172 b'm',
173 b'm',
173 b'mergeable-file',
174 b'mergeable-file',
174 None,
175 None,
175 _(b'add single file mergeable changes'),
176 _(b'add single file mergeable changes'),
176 ),
177 ),
177 (
178 (
178 b'o',
179 b'o',
179 b'overwritten-file',
180 b'overwritten-file',
180 None,
181 None,
181 _(b'add single file all revs overwrite'),
182 _(b'add single file all revs overwrite'),
182 ),
183 ),
183 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 (
185 (
185 b'',
186 b'',
186 b'from-existing',
187 b'from-existing',
187 None,
188 None,
188 _(b'continue from a non-empty repository'),
189 _(b'continue from a non-empty repository'),
189 ),
190 ),
190 ],
191 ],
191 _(b'[OPTION]... [TEXT]'),
192 _(b'[OPTION]... [TEXT]'),
192 )
193 )
193 def debugbuilddag(
194 def debugbuilddag(
194 ui,
195 ui,
195 repo,
196 repo,
196 text=None,
197 text=None,
197 mergeable_file=False,
198 mergeable_file=False,
198 overwritten_file=False,
199 overwritten_file=False,
199 new_file=False,
200 new_file=False,
200 from_existing=False,
201 from_existing=False,
201 ):
202 ):
202 """builds a repo with a given DAG from scratch in the current empty repo
203 """builds a repo with a given DAG from scratch in the current empty repo
203
204
204 The description of the DAG is read from stdin if not given on the
205 The description of the DAG is read from stdin if not given on the
205 command line.
206 command line.
206
207
207 Elements:
208 Elements:
208
209
209 - "+n" is a linear run of n nodes based on the current default parent
210 - "+n" is a linear run of n nodes based on the current default parent
210 - "." is a single node based on the current default parent
211 - "." is a single node based on the current default parent
211 - "$" resets the default parent to null (implied at the start);
212 - "$" resets the default parent to null (implied at the start);
212 otherwise the default parent is always the last node created
213 otherwise the default parent is always the last node created
213 - "<p" sets the default parent to the backref p
214 - "<p" sets the default parent to the backref p
214 - "*p" is a fork at parent p, which is a backref
215 - "*p" is a fork at parent p, which is a backref
215 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 - "/p2" is a merge of the preceding node and p2
217 - "/p2" is a merge of the preceding node and p2
217 - ":tag" defines a local tag for the preceding node
218 - ":tag" defines a local tag for the preceding node
218 - "@branch" sets the named branch for subsequent nodes
219 - "@branch" sets the named branch for subsequent nodes
219 - "#...\\n" is a comment up to the end of the line
220 - "#...\\n" is a comment up to the end of the line
220
221
221 Whitespace between the above elements is ignored.
222 Whitespace between the above elements is ignored.
222
223
223 A backref is either
224 A backref is either
224
225
225 - a number n, which references the node curr-n, where curr is the current
226 - a number n, which references the node curr-n, where curr is the current
226 node, or
227 node, or
227 - the name of a local tag you placed earlier using ":tag", or
228 - the name of a local tag you placed earlier using ":tag", or
228 - empty to denote the default parent.
229 - empty to denote the default parent.
229
230
230 All string-valued elements are either strictly alphanumeric, or must
231 All string-valued elements are either strictly alphanumeric, or must
231 be enclosed in double quotes ("..."), with "\\" as escape character.
232 be enclosed in double quotes ("..."), with "\\" as escape character.
232 """
233 """
233
234
234 if text is None:
235 if text is None:
235 ui.status(_(b"reading DAG from stdin\n"))
236 ui.status(_(b"reading DAG from stdin\n"))
236 text = ui.fin.read()
237 text = ui.fin.read()
237
238
238 cl = repo.changelog
239 cl = repo.changelog
239 if len(cl) > 0 and not from_existing:
240 if len(cl) > 0 and not from_existing:
240 raise error.Abort(_(b'repository is not empty'))
241 raise error.Abort(_(b'repository is not empty'))
241
242
242 # determine number of revs in DAG
243 # determine number of revs in DAG
243 total = 0
244 total = 0
244 for type, data in dagparser.parsedag(text):
245 for type, data in dagparser.parsedag(text):
245 if type == b'n':
246 if type == b'n':
246 total += 1
247 total += 1
247
248
248 if mergeable_file:
249 if mergeable_file:
249 linesperrev = 2
250 linesperrev = 2
250 # make a file with k lines per rev
251 # make a file with k lines per rev
251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 initialmergedlines.append(b"")
253 initialmergedlines.append(b"")
253
254
254 tags = []
255 tags = []
255 progress = ui.makeprogress(
256 progress = ui.makeprogress(
256 _(b'building'), unit=_(b'revisions'), total=total
257 _(b'building'), unit=_(b'revisions'), total=total
257 )
258 )
258 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 at = -1
260 at = -1
260 atbranch = b'default'
261 atbranch = b'default'
261 nodeids = []
262 nodeids = []
262 id = 0
263 id = 0
263 progress.update(id)
264 progress.update(id)
264 for type, data in dagparser.parsedag(text):
265 for type, data in dagparser.parsedag(text):
265 if type == b'n':
266 if type == b'n':
266 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 id, ps = data
268 id, ps = data
268
269
269 files = []
270 files = []
270 filecontent = {}
271 filecontent = {}
271
272
272 p2 = None
273 p2 = None
273 if mergeable_file:
274 if mergeable_file:
274 fn = b"mf"
275 fn = b"mf"
275 p1 = repo[ps[0]]
276 p1 = repo[ps[0]]
276 if len(ps) > 1:
277 if len(ps) > 1:
277 p2 = repo[ps[1]]
278 p2 = repo[ps[1]]
278 pa = p1.ancestor(p2)
279 pa = p1.ancestor(p2)
279 base, local, other = [
280 base, local, other = [
280 x[fn].data() for x in (pa, p1, p2)
281 x[fn].data() for x in (pa, p1, p2)
281 ]
282 ]
282 m3 = simplemerge.Merge3Text(base, local, other)
283 m3 = simplemerge.Merge3Text(base, local, other)
283 ml = [
284 ml = [
284 l.strip()
285 l.strip()
285 for l in simplemerge.render_minimized(m3)[0]
286 for l in simplemerge.render_minimized(m3)[0]
286 ]
287 ]
287 ml.append(b"")
288 ml.append(b"")
288 elif at > 0:
289 elif at > 0:
289 ml = p1[fn].data().split(b"\n")
290 ml = p1[fn].data().split(b"\n")
290 else:
291 else:
291 ml = initialmergedlines
292 ml = initialmergedlines
292 ml[id * linesperrev] += b" r%i" % id
293 ml[id * linesperrev] += b" r%i" % id
293 mergedtext = b"\n".join(ml)
294 mergedtext = b"\n".join(ml)
294 files.append(fn)
295 files.append(fn)
295 filecontent[fn] = mergedtext
296 filecontent[fn] = mergedtext
296
297
297 if overwritten_file:
298 if overwritten_file:
298 fn = b"of"
299 fn = b"of"
299 files.append(fn)
300 files.append(fn)
300 filecontent[fn] = b"r%i\n" % id
301 filecontent[fn] = b"r%i\n" % id
301
302
302 if new_file:
303 if new_file:
303 fn = b"nf%i" % id
304 fn = b"nf%i" % id
304 files.append(fn)
305 files.append(fn)
305 filecontent[fn] = b"r%i\n" % id
306 filecontent[fn] = b"r%i\n" % id
306 if len(ps) > 1:
307 if len(ps) > 1:
307 if not p2:
308 if not p2:
308 p2 = repo[ps[1]]
309 p2 = repo[ps[1]]
309 for fn in p2:
310 for fn in p2:
310 if fn.startswith(b"nf"):
311 if fn.startswith(b"nf"):
311 files.append(fn)
312 files.append(fn)
312 filecontent[fn] = p2[fn].data()
313 filecontent[fn] = p2[fn].data()
313
314
314 def fctxfn(repo, cx, path):
315 def fctxfn(repo, cx, path):
315 if path in filecontent:
316 if path in filecontent:
316 return context.memfilectx(
317 return context.memfilectx(
317 repo, cx, path, filecontent[path]
318 repo, cx, path, filecontent[path]
318 )
319 )
319 return None
320 return None
320
321
321 if len(ps) == 0 or ps[0] < 0:
322 if len(ps) == 0 or ps[0] < 0:
322 pars = [None, None]
323 pars = [None, None]
323 elif len(ps) == 1:
324 elif len(ps) == 1:
324 pars = [nodeids[ps[0]], None]
325 pars = [nodeids[ps[0]], None]
325 else:
326 else:
326 pars = [nodeids[p] for p in ps]
327 pars = [nodeids[p] for p in ps]
327 cx = context.memctx(
328 cx = context.memctx(
328 repo,
329 repo,
329 pars,
330 pars,
330 b"r%i" % id,
331 b"r%i" % id,
331 files,
332 files,
332 fctxfn,
333 fctxfn,
333 date=(id, 0),
334 date=(id, 0),
334 user=b"debugbuilddag",
335 user=b"debugbuilddag",
335 extra={b'branch': atbranch},
336 extra={b'branch': atbranch},
336 )
337 )
337 nodeid = repo.commitctx(cx)
338 nodeid = repo.commitctx(cx)
338 nodeids.append(nodeid)
339 nodeids.append(nodeid)
339 at = id
340 at = id
340 elif type == b'l':
341 elif type == b'l':
341 id, name = data
342 id, name = data
342 ui.note((b'tag %s\n' % name))
343 ui.note((b'tag %s\n' % name))
343 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 elif type == b'a':
345 elif type == b'a':
345 ui.note((b'branch %s\n' % data))
346 ui.note((b'branch %s\n' % data))
346 atbranch = data
347 atbranch = data
347 progress.update(id)
348 progress.update(id)
348
349
349 if tags:
350 if tags:
350 repo.vfs.write(b"localtags", b"".join(tags))
351 repo.vfs.write(b"localtags", b"".join(tags))
351
352
352
353
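As an illustration of the DAG grammar documented in the debugbuilddag docstring above (the description string below is a hypothetical example, not part of this changeset), the following invocation builds two linear changesets and tags the second one "f", adds three more and tags the last one "p2", opens a four-changeset branch named "temp" rooted at "f", merges it back with "p2" on the default branch, and finishes with two more changesets on top of the merge:

    $ hg debugbuilddag -n '+2:f +3:p2 @temp <f +4 @default /p2 +2'

The -n flag asks for a new file in every revision, per the options listed above; -m or -o could be used instead to exercise mergeable or overwritten file contents.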
353 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 indent_string = b' ' * indent
355 indent_string = b' ' * indent
355 if all:
356 if all:
356 ui.writenoi18n(
357 ui.writenoi18n(
357 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 % indent_string
359 % indent_string
359 )
360 )
360
361
361 def showchunks(named):
362 def showchunks(named):
362 ui.write(b"\n%s%s\n" % (indent_string, named))
363 ui.write(b"\n%s%s\n" % (indent_string, named))
363 for deltadata in gen.deltaiter():
364 for deltadata in gen.deltaiter():
364 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 ui.write(
366 ui.write(
366 b"%s%s %s %s %s %s %d\n"
367 b"%s%s %s %s %s %s %d\n"
367 % (
368 % (
368 indent_string,
369 indent_string,
369 hex(node),
370 hex(node),
370 hex(p1),
371 hex(p1),
371 hex(p2),
372 hex(p2),
372 hex(cs),
373 hex(cs),
373 hex(deltabase),
374 hex(deltabase),
374 len(delta),
375 len(delta),
375 )
376 )
376 )
377 )
377
378
378 gen.changelogheader()
379 gen.changelogheader()
379 showchunks(b"changelog")
380 showchunks(b"changelog")
380 gen.manifestheader()
381 gen.manifestheader()
381 showchunks(b"manifest")
382 showchunks(b"manifest")
382 for chunkdata in iter(gen.filelogheader, {}):
383 for chunkdata in iter(gen.filelogheader, {}):
383 fname = chunkdata[b'filename']
384 fname = chunkdata[b'filename']
384 showchunks(fname)
385 showchunks(fname)
385 else:
386 else:
386 if isinstance(gen, bundle2.unbundle20):
387 if isinstance(gen, bundle2.unbundle20):
387 raise error.Abort(_(b'use debugbundle2 for this file'))
388 raise error.Abort(_(b'use debugbundle2 for this file'))
388 gen.changelogheader()
389 gen.changelogheader()
389 for deltadata in gen.deltaiter():
390 for deltadata in gen.deltaiter():
390 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
393
393
394
394 def _debugobsmarkers(ui, part, indent=0, **opts):
395 def _debugobsmarkers(ui, part, indent=0, **opts):
395 """display version and markers contained in 'data'"""
396 """display version and markers contained in 'data'"""
396 opts = pycompat.byteskwargs(opts)
397 opts = pycompat.byteskwargs(opts)
397 data = part.read()
398 data = part.read()
398 indent_string = b' ' * indent
399 indent_string = b' ' * indent
399 try:
400 try:
400 version, markers = obsolete._readmarkers(data)
401 version, markers = obsolete._readmarkers(data)
401 except error.UnknownVersion as exc:
402 except error.UnknownVersion as exc:
402 msg = b"%sunsupported version: %s (%d bytes)\n"
403 msg = b"%sunsupported version: %s (%d bytes)\n"
403 msg %= indent_string, exc.version, len(data)
404 msg %= indent_string, exc.version, len(data)
404 ui.write(msg)
405 ui.write(msg)
405 else:
406 else:
406 msg = b"%sversion: %d (%d bytes)\n"
407 msg = b"%sversion: %d (%d bytes)\n"
407 msg %= indent_string, version, len(data)
408 msg %= indent_string, version, len(data)
408 ui.write(msg)
409 ui.write(msg)
409 fm = ui.formatter(b'debugobsolete', opts)
410 fm = ui.formatter(b'debugobsolete', opts)
410 for rawmarker in sorted(markers):
411 for rawmarker in sorted(markers):
411 m = obsutil.marker(None, rawmarker)
412 m = obsutil.marker(None, rawmarker)
412 fm.startitem()
413 fm.startitem()
413 fm.plain(indent_string)
414 fm.plain(indent_string)
414 cmdutil.showmarker(fm, m)
415 cmdutil.showmarker(fm, m)
415 fm.end()
416 fm.end()
416
417
417
418
418 def _debugphaseheads(ui, data, indent=0):
419 def _debugphaseheads(ui, data, indent=0):
419 """display version and markers contained in 'data'"""
420 """display version and markers contained in 'data'"""
420 indent_string = b' ' * indent
421 indent_string = b' ' * indent
421 headsbyphase = phases.binarydecode(data)
422 headsbyphase = phases.binarydecode(data)
422 for phase in phases.allphases:
423 for phase in phases.allphases:
423 for head in headsbyphase[phase]:
424 for head in headsbyphase[phase]:
424 ui.write(indent_string)
425 ui.write(indent_string)
425 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
427
427
428
428 def _quasirepr(thing):
429 def _quasirepr(thing):
429 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 return b'{%s}' % (
431 return b'{%s}' % (
431 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 )
433 )
433 return pycompat.bytestr(repr(thing))
434 return pycompat.bytestr(repr(thing))
434
435
435
436
436 def _debugbundle2(ui, gen, all=None, **opts):
437 def _debugbundle2(ui, gen, all=None, **opts):
437 """lists the contents of a bundle2"""
438 """lists the contents of a bundle2"""
438 if not isinstance(gen, bundle2.unbundle20):
439 if not isinstance(gen, bundle2.unbundle20):
439 raise error.Abort(_(b'not a bundle2 file'))
440 raise error.Abort(_(b'not a bundle2 file'))
440 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 parttypes = opts.get('part_type', [])
442 parttypes = opts.get('part_type', [])
442 for part in gen.iterparts():
443 for part in gen.iterparts():
443 if parttypes and part.type not in parttypes:
444 if parttypes and part.type not in parttypes:
444 continue
445 continue
445 msg = b'%s -- %s (mandatory: %r)\n'
446 msg = b'%s -- %s (mandatory: %r)\n'
446 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 if part.type == b'changegroup':
448 if part.type == b'changegroup':
448 version = part.params.get(b'version', b'01')
449 version = part.params.get(b'version', b'01')
449 cg = changegroup.getunbundler(version, part, b'UN')
450 cg = changegroup.getunbundler(version, part, b'UN')
450 if not ui.quiet:
451 if not ui.quiet:
451 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 if part.type == b'obsmarkers':
453 if part.type == b'obsmarkers':
453 if not ui.quiet:
454 if not ui.quiet:
454 _debugobsmarkers(ui, part, indent=4, **opts)
455 _debugobsmarkers(ui, part, indent=4, **opts)
455 if part.type == b'phase-heads':
456 if part.type == b'phase-heads':
456 if not ui.quiet:
457 if not ui.quiet:
457 _debugphaseheads(ui, part, indent=4)
458 _debugphaseheads(ui, part, indent=4)
458
459
459
460
460 @command(
461 @command(
461 b'debugbundle',
462 b'debugbundle',
462 [
463 [
463 (b'a', b'all', None, _(b'show all details')),
464 (b'a', b'all', None, _(b'show all details')),
464 (b'', b'part-type', [], _(b'show only the named part type')),
465 (b'', b'part-type', [], _(b'show only the named part type')),
465 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 ],
467 ],
467 _(b'FILE'),
468 _(b'FILE'),
468 norepo=True,
469 norepo=True,
469 )
470 )
470 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 """lists the contents of a bundle"""
472 """lists the contents of a bundle"""
472 with hg.openpath(ui, bundlepath) as f:
473 with hg.openpath(ui, bundlepath) as f:
473 if spec:
474 if spec:
474 spec = exchange.getbundlespec(ui, f)
475 spec = exchange.getbundlespec(ui, f)
475 ui.write(b'%s\n' % spec)
476 ui.write(b'%s\n' % spec)
476 return
477 return
477
478
478 gen = exchange.readbundle(ui, f, bundlepath)
479 gen = exchange.readbundle(ui, f, bundlepath)
479 if isinstance(gen, bundle2.unbundle20):
480 if isinstance(gen, bundle2.unbundle20):
480 return _debugbundle2(ui, gen, all=all, **opts)
481 return _debugbundle2(ui, gen, all=all, **opts)
481 _debugchangegroup(ui, gen, all=all, **opts)
482 _debugchangegroup(ui, gen, all=all, **opts)
482
483
483
484
484 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 def debugcapabilities(ui, path, **opts):
486 def debugcapabilities(ui, path, **opts):
486 """lists the capabilities of a remote peer"""
487 """lists the capabilities of a remote peer"""
487 opts = pycompat.byteskwargs(opts)
488 opts = pycompat.byteskwargs(opts)
488 peer = hg.peer(ui, opts, path)
489 peer = hg.peer(ui, opts, path)
489 try:
490 try:
490 caps = peer.capabilities()
491 caps = peer.capabilities()
491 ui.writenoi18n(b'Main capabilities:\n')
492 ui.writenoi18n(b'Main capabilities:\n')
492 for c in sorted(caps):
493 for c in sorted(caps):
493 ui.write(b' %s\n' % c)
494 ui.write(b' %s\n' % c)
494 b2caps = bundle2.bundle2caps(peer)
495 b2caps = bundle2.bundle2caps(peer)
495 if b2caps:
496 if b2caps:
496 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 for key, values in sorted(b2caps.items()):
498 for key, values in sorted(b2caps.items()):
498 ui.write(b' %s\n' % key)
499 ui.write(b' %s\n' % key)
499 for v in values:
500 for v in values:
500 ui.write(b' %s\n' % v)
501 ui.write(b' %s\n' % v)
501 finally:
502 finally:
502 peer.close()
503 peer.close()
503
504
504
505
505 @command(
506 @command(
506 b'debugchangedfiles',
507 b'debugchangedfiles',
507 [
508 [
508 (
509 (
509 b'',
510 b'',
510 b'compute',
511 b'compute',
511 False,
512 False,
512 b"compute information instead of reading it from storage",
513 b"compute information instead of reading it from storage",
513 ),
514 ),
514 ],
515 ],
515 b'REV',
516 b'REV',
516 )
517 )
517 def debugchangedfiles(ui, repo, rev, **opts):
518 def debugchangedfiles(ui, repo, rev, **opts):
518 """list the stored files changes for a revision"""
519 """list the stored files changes for a revision"""
519 ctx = logcmdutil.revsingle(repo, rev, None)
520 ctx = logcmdutil.revsingle(repo, rev, None)
520 files = None
521 files = None
521
522
522 if opts['compute']:
523 if opts['compute']:
523 files = metadata.compute_all_files_changes(ctx)
524 files = metadata.compute_all_files_changes(ctx)
524 else:
525 else:
525 sd = repo.changelog.sidedata(ctx.rev())
526 sd = repo.changelog.sidedata(ctx.rev())
526 files_block = sd.get(sidedata.SD_FILES)
527 files_block = sd.get(sidedata.SD_FILES)
527 if files_block is not None:
528 if files_block is not None:
528 files = metadata.decode_files_sidedata(sd)
529 files = metadata.decode_files_sidedata(sd)
529 if files is not None:
530 if files is not None:
530 for f in sorted(files.touched):
531 for f in sorted(files.touched):
531 if f in files.added:
532 if f in files.added:
532 action = b"added"
533 action = b"added"
533 elif f in files.removed:
534 elif f in files.removed:
534 action = b"removed"
535 action = b"removed"
535 elif f in files.merged:
536 elif f in files.merged:
536 action = b"merged"
537 action = b"merged"
537 elif f in files.salvaged:
538 elif f in files.salvaged:
538 action = b"salvaged"
539 action = b"salvaged"
539 else:
540 else:
540 action = b"touched"
541 action = b"touched"
541
542
542 copy_parent = b""
543 copy_parent = b""
543 copy_source = b""
544 copy_source = b""
544 if f in files.copied_from_p1:
545 if f in files.copied_from_p1:
545 copy_parent = b"p1"
546 copy_parent = b"p1"
546 copy_source = files.copied_from_p1[f]
547 copy_source = files.copied_from_p1[f]
547 elif f in files.copied_from_p2:
548 elif f in files.copied_from_p2:
548 copy_parent = b"p2"
549 copy_parent = b"p2"
549 copy_source = files.copied_from_p2[f]
550 copy_source = files.copied_from_p2[f]
550
551
551 data = (action, copy_parent, f, copy_source)
552 data = (action, copy_parent, f, copy_source)
552 template = b"%-8s %2s: %s, %s;\n"
553 template = b"%-8s %2s: %s, %s;\n"
553 ui.write(template % data)
554 ui.write(template % data)
554
555
555
556
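For reference, each touched file is reported through the "%-8s %2s: %s, %s;" template above, i.e. action, copy parent (p1, p2, or blank), file name, and copy source. A purely illustrative line (hypothetical file names) for a file added as a copy from the first parent would read:

    added    p1: renamed.txt, original.txt;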
556 @command(b'debugcheckstate', [], b'')
557 @command(b'debugcheckstate', [], b'')
557 def debugcheckstate(ui, repo):
558 def debugcheckstate(ui, repo):
558 """validate the correctness of the current dirstate"""
559 """validate the correctness of the current dirstate"""
559 parent1, parent2 = repo.dirstate.parents()
560 parent1, parent2 = repo.dirstate.parents()
560 m1 = repo[parent1].manifest()
561 m1 = repo[parent1].manifest()
561 m2 = repo[parent2].manifest()
562 m2 = repo[parent2].manifest()
562 errors = 0
563 errors = 0
563 for err in repo.dirstate.verify(m1, m2):
564 for err in repo.dirstate.verify(m1, m2):
564 ui.warn(err[0] % err[1:])
565 ui.warn(err[0] % err[1:])
565 errors += 1
566 errors += 1
566 if errors:
567 if errors:
567 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
568 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
568 raise error.Abort(errstr)
569 raise error.Abort(errstr)
569
570
570
571
571 @command(
572 @command(
572 b'debugcolor',
573 b'debugcolor',
573 [(b'', b'style', None, _(b'show all configured styles'))],
574 [(b'', b'style', None, _(b'show all configured styles'))],
574 b'hg debugcolor',
575 b'hg debugcolor',
575 )
576 )
576 def debugcolor(ui, repo, **opts):
577 def debugcolor(ui, repo, **opts):
577 """show available color, effects or style"""
578 """show available color, effects or style"""
578 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
579 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
579 if opts.get('style'):
580 if opts.get('style'):
580 return _debugdisplaystyle(ui)
581 return _debugdisplaystyle(ui)
581 else:
582 else:
582 return _debugdisplaycolor(ui)
583 return _debugdisplaycolor(ui)
583
584
584
585
585 def _debugdisplaycolor(ui):
586 def _debugdisplaycolor(ui):
586 ui = ui.copy()
587 ui = ui.copy()
587 ui._styles.clear()
588 ui._styles.clear()
588 for effect in color._activeeffects(ui).keys():
589 for effect in color._activeeffects(ui).keys():
589 ui._styles[effect] = effect
590 ui._styles[effect] = effect
590 if ui._terminfoparams:
591 if ui._terminfoparams:
591 for k, v in ui.configitems(b'color'):
592 for k, v in ui.configitems(b'color'):
592 if k.startswith(b'color.'):
593 if k.startswith(b'color.'):
593 ui._styles[k] = k[6:]
594 ui._styles[k] = k[6:]
594 elif k.startswith(b'terminfo.'):
595 elif k.startswith(b'terminfo.'):
595 ui._styles[k] = k[9:]
596 ui._styles[k] = k[9:]
596 ui.write(_(b'available colors:\n'))
597 ui.write(_(b'available colors:\n'))
597 # sort labels with a '_' after the others to group the '_background' entries.
598 # sort labels with a '_' after the others to group the '_background' entries.
598 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
599 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
599 for colorname, label in items:
600 for colorname, label in items:
600 ui.write(b'%s\n' % colorname, label=label)
601 ui.write(b'%s\n' % colorname, label=label)
601
602
602
603
603 def _debugdisplaystyle(ui):
604 def _debugdisplaystyle(ui):
604 ui.write(_(b'available style:\n'))
605 ui.write(_(b'available style:\n'))
605 if not ui._styles:
606 if not ui._styles:
606 return
607 return
607 width = max(len(s) for s in ui._styles)
608 width = max(len(s) for s in ui._styles)
608 for label, effects in sorted(ui._styles.items()):
609 for label, effects in sorted(ui._styles.items()):
609 ui.write(b'%s' % label, label=label)
610 ui.write(b'%s' % label, label=label)
610 if effects:
611 if effects:
611 # 50
612 # 50
612 ui.write(b': ')
613 ui.write(b': ')
613 ui.write(b' ' * (max(0, width - len(label))))
614 ui.write(b' ' * (max(0, width - len(label))))
614 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
615 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
615 ui.write(b'\n')
616 ui.write(b'\n')
616
617
617
618
618 @command(b'debugcreatestreamclonebundle', [], b'FILE')
619 @command(b'debugcreatestreamclonebundle', [], b'FILE')
619 def debugcreatestreamclonebundle(ui, repo, fname):
620 def debugcreatestreamclonebundle(ui, repo, fname):
620 """create a stream clone bundle file
621 """create a stream clone bundle file
621
622
622 Stream bundles are special bundles that are essentially archives of
623 Stream bundles are special bundles that are essentially archives of
623 revlog files. They are commonly used for cloning very quickly.
624 revlog files. They are commonly used for cloning very quickly.
624 """
625 """
625 # TODO we may want to turn this into an abort when this functionality
626 # TODO we may want to turn this into an abort when this functionality
626 # is moved into `hg bundle`.
627 # is moved into `hg bundle`.
627 if phases.hassecret(repo):
628 if phases.hassecret(repo):
628 ui.warn(
629 ui.warn(
629 _(
630 _(
630 b'(warning: stream clone bundle will contain secret '
631 b'(warning: stream clone bundle will contain secret '
631 b'revisions)\n'
632 b'revisions)\n'
632 )
633 )
633 )
634 )
634
635
635 requirements, gen = streamclone.generatebundlev1(repo)
636 requirements, gen = streamclone.generatebundlev1(repo)
636 changegroup.writechunks(ui, gen, fname)
637 changegroup.writechunks(ui, gen, fname)
637
638
638 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639
640
640
641
641 @command(
642 @command(
642 b'debugdag',
643 b'debugdag',
643 [
644 [
644 (b't', b'tags', None, _(b'use tags as labels')),
645 (b't', b'tags', None, _(b'use tags as labels')),
645 (b'b', b'branches', None, _(b'annotate with branch names')),
646 (b'b', b'branches', None, _(b'annotate with branch names')),
646 (b'', b'dots', None, _(b'use dots for runs')),
647 (b'', b'dots', None, _(b'use dots for runs')),
647 (b's', b'spaces', None, _(b'separate elements by spaces')),
648 (b's', b'spaces', None, _(b'separate elements by spaces')),
648 ],
649 ],
649 _(b'[OPTION]... [FILE [REV]...]'),
650 _(b'[OPTION]... [FILE [REV]...]'),
650 optionalrepo=True,
651 optionalrepo=True,
651 )
652 )
652 def debugdag(ui, repo, file_=None, *revs, **opts):
653 def debugdag(ui, repo, file_=None, *revs, **opts):
653 """format the changelog or an index DAG as a concise textual description
654 """format the changelog or an index DAG as a concise textual description
654
655
655 If you pass a revlog index, the revlog's DAG is emitted. If you list
656 If you pass a revlog index, the revlog's DAG is emitted. If you list
656 revision numbers, they get labeled in the output as rN.
657 revision numbers, they get labeled in the output as rN.
657
658
658 Otherwise, the changelog DAG of the current repo is emitted.
659 Otherwise, the changelog DAG of the current repo is emitted.
659 """
660 """
660 spaces = opts.get('spaces')
661 spaces = opts.get('spaces')
661 dots = opts.get('dots')
662 dots = opts.get('dots')
662 if file_:
663 if file_:
663 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
664 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
664 revs = {int(r) for r in revs}
665 revs = {int(r) for r in revs}
665
666
666 def events():
667 def events():
667 for r in rlog:
668 for r in rlog:
668 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
669 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
669 if r in revs:
670 if r in revs:
670 yield b'l', (r, b"r%i" % r)
671 yield b'l', (r, b"r%i" % r)
671
672
672 elif repo:
673 elif repo:
673 cl = repo.changelog
674 cl = repo.changelog
674 tags = opts.get('tags')
675 tags = opts.get('tags')
675 branches = opts.get('branches')
676 branches = opts.get('branches')
676 if tags:
677 if tags:
677 labels = {}
678 labels = {}
678 for l, n in repo.tags().items():
679 for l, n in repo.tags().items():
679 labels.setdefault(cl.rev(n), []).append(l)
680 labels.setdefault(cl.rev(n), []).append(l)
680
681
681 def events():
682 def events():
682 b = b"default"
683 b = b"default"
683 for r in cl:
684 for r in cl:
684 if branches:
685 if branches:
685 newb = cl.read(cl.node(r))[5][b'branch']
686 newb = cl.read(cl.node(r))[5][b'branch']
686 if newb != b:
687 if newb != b:
687 yield b'a', newb
688 yield b'a', newb
688 b = newb
689 b = newb
689 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
690 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
690 if tags:
691 if tags:
691 ls = labels.get(r)
692 ls = labels.get(r)
692 if ls:
693 if ls:
693 for l in ls:
694 for l in ls:
694 yield b'l', (r, l)
695 yield b'l', (r, l)
695
696
696 else:
697 else:
697 raise error.Abort(_(b'need repo for changelog dag'))
698 raise error.Abort(_(b'need repo for changelog dag'))
698
699
699 for line in dagparser.dagtextlines(
700 for line in dagparser.dagtextlines(
700 events(),
701 events(),
701 addspaces=spaces,
702 addspaces=spaces,
702 wraplabels=True,
703 wraplabels=True,
703 wrapannotations=True,
704 wrapannotations=True,
704 wrapnonlinear=dots,
705 wrapnonlinear=dots,
705 usedots=dots,
706 usedots=dots,
706 maxlinewidth=70,
707 maxlinewidth=70,
707 ):
708 ):
708 ui.write(line)
709 ui.write(line)
709 ui.write(b"\n")
710 ui.write(b"\n")
710
711
711
712
712 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
713 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
713 def debugdata(ui, repo, file_, rev=None, **opts):
714 def debugdata(ui, repo, file_, rev=None, **opts):
714 """dump the contents of a data file revision"""
715 """dump the contents of a data file revision"""
715 opts = pycompat.byteskwargs(opts)
716 opts = pycompat.byteskwargs(opts)
716 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
717 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
717 if rev is not None:
718 if rev is not None:
718 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
719 file_, rev = None, file_
720 file_, rev = None, file_
720 elif rev is None:
721 elif rev is None:
721 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
722 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
722 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
723 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
723 try:
724 try:
724 ui.write(r.rawdata(r.lookup(rev)))
725 ui.write(r.rawdata(r.lookup(rev)))
725 except KeyError:
726 except KeyError:
726 raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
728
728
729
729 @command(
730 @command(
730 b'debugdate',
731 b'debugdate',
731 [(b'e', b'extended', None, _(b'try extended date formats'))],
732 [(b'e', b'extended', None, _(b'try extended date formats'))],
732 _(b'[-e] DATE [RANGE]'),
733 _(b'[-e] DATE [RANGE]'),
733 norepo=True,
734 norepo=True,
734 optionalrepo=True,
735 optionalrepo=True,
735 )
736 )
736 def debugdate(ui, date, range=None, **opts):
737 def debugdate(ui, date, range=None, **opts):
737 """parse and display a date"""
738 """parse and display a date"""
738 if opts["extended"]:
739 if opts["extended"]:
739 d = dateutil.parsedate(date, dateutil.extendeddateformats)
740 d = dateutil.parsedate(date, dateutil.extendeddateformats)
740 else:
741 else:
741 d = dateutil.parsedate(date)
742 d = dateutil.parsedate(date)
742 ui.writenoi18n(b"internal: %d %d\n" % d)
743 ui.writenoi18n(b"internal: %d %d\n" % d)
743 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
744 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
744 if range:
745 if range:
745 m = dateutil.matchdate(range)
746 m = dateutil.matchdate(range)
746 ui.writenoi18n(b"match: %s\n" % m(d[0]))
747 ui.writenoi18n(b"match: %s\n" % m(d[0]))
747
748
748
749
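A usage sketch for debugdate (the dates below are placeholders and the internal timestamp depends on the local timezone):

    $ hg debugdate '2006-06-06 12:00'
    $ hg debugdate -e '2006'                          # also try the extended date formats
    $ hg debugdate '2006-06-06 12:00' '>2005-01-01'   # check the date against a range

The output always contains the "internal: <unixtime> <tzoffset>" and "standard: ..." lines; when RANGE is given, a third "match: ..." line reports whether the date falls inside it.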
749 @command(
750 @command(
750 b'debugdeltachain',
751 b'debugdeltachain',
751 cmdutil.debugrevlogopts + cmdutil.formatteropts,
752 cmdutil.debugrevlogopts + cmdutil.formatteropts,
752 _(b'-c|-m|FILE'),
753 _(b'-c|-m|FILE'),
753 optionalrepo=True,
754 optionalrepo=True,
754 )
755 )
755 def debugdeltachain(ui, repo, file_=None, **opts):
756 def debugdeltachain(ui, repo, file_=None, **opts):
756 """dump information about delta chains in a revlog
757 """dump information about delta chains in a revlog
757
758
758 Output can be templatized. Available template keywords are:
759 Output can be templatized. Available template keywords are:
759
760
760 :``rev``: revision number
761 :``rev``: revision number
761 :``p1``: parent 1 revision number (for reference)
762 :``p1``: parent 1 revision number (for reference)
762 :``p2``: parent 2 revision number (for reference)
763 :``p2``: parent 2 revision number (for reference)
763 :``chainid``: delta chain identifier (numbered by unique base)
764 :``chainid``: delta chain identifier (numbered by unique base)
764 :``chainlen``: delta chain length to this revision
765 :``chainlen``: delta chain length to this revision
765 :``prevrev``: previous revision in delta chain
766 :``prevrev``: previous revision in delta chain
766 :``deltatype``: role of delta / how it was computed
767 :``deltatype``: role of delta / how it was computed
767 - base: a full snapshot
768 - base: a full snapshot
768 - snap: an intermediate snapshot
769 - snap: an intermediate snapshot
769 - p1: a delta against the first parent
770 - p1: a delta against the first parent
770 - p2: a delta against the second parent
771 - p2: a delta against the second parent
771 - skip1: a delta against the same base as p1
772 - skip1: a delta against the same base as p1
772 (when p1 has an empty delta)
773 (when p1 has an empty delta)
773 - skip2: a delta against the same base as p2
774 - skip2: a delta against the same base as p2
774 (when p2 has an empty delta)
775 (when p2 has an empty delta)
775 - prev: a delta against the previous revision
776 - prev: a delta against the previous revision
776 - other: a delta against an arbitrary revision
777 - other: a delta against an arbitrary revision
777 :``compsize``: compressed size of revision
778 :``compsize``: compressed size of revision
778 :``uncompsize``: uncompressed size of revision
779 :``uncompsize``: uncompressed size of revision
779 :``chainsize``: total size of compressed revisions in chain
780 :``chainsize``: total size of compressed revisions in chain
780 :``chainratio``: total chain size divided by uncompressed revision size
781 :``chainratio``: total chain size divided by uncompressed revision size
781 (new delta chains typically start at ratio 2.00)
782 (new delta chains typically start at ratio 2.00)
782 :``lindist``: linear distance from base revision in delta chain to end
783 :``lindist``: linear distance from base revision in delta chain to end
783 of this revision
784 of this revision
784 :``extradist``: total size of revisions not part of this delta chain from
785 :``extradist``: total size of revisions not part of this delta chain from
785 base of delta chain to end of this revision; a measurement
786 base of delta chain to end of this revision; a measurement
786 of how much extra data we need to read/seek across to read
787 of how much extra data we need to read/seek across to read
787 the delta chain for this revision
788 the delta chain for this revision
788 :``extraratio``: extradist divided by chainsize; another representation of
789 :``extraratio``: extradist divided by chainsize; another representation of
789 how much unrelated data is needed to load this delta chain
790 how much unrelated data is needed to load this delta chain
790
791
791 If the repository is configured to use the sparse read, additional keywords
792 If the repository is configured to use the sparse read, additional keywords
792 are available:
793 are available:
793
794
794 :``readsize``: total size of data read from the disk for a revision
795 :``readsize``: total size of data read from the disk for a revision
795 (sum of the sizes of all the blocks)
796 (sum of the sizes of all the blocks)
796 :``largestblock``: size of the largest block of data read from the disk
797 :``largestblock``: size of the largest block of data read from the disk
797 :``readdensity``: density of useful bytes in the data read from the disk
798 :``readdensity``: density of useful bytes in the data read from the disk
798 :``srchunks``: in how many data hunks the whole revision would be read
799 :``srchunks``: in how many data hunks the whole revision would be read
799
800
800 The sparse read can be enabled with experimental.sparse-read = True
801 The sparse read can be enabled with experimental.sparse-read = True
801 """
802 """
802 opts = pycompat.byteskwargs(opts)
803 opts = pycompat.byteskwargs(opts)
803 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
804 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
804 index = r.index
805 index = r.index
805 start = r.start
806 start = r.start
806 length = r.length
807 length = r.length
807 generaldelta = r._generaldelta
808 generaldelta = r._generaldelta
808 withsparseread = getattr(r, '_withsparseread', False)
809 withsparseread = getattr(r, '_withsparseread', False)
809
810
810 # security to avoid crash on corrupted revlogs
811 # security to avoid crash on corrupted revlogs
811 total_revs = len(index)
812 total_revs = len(index)
812
813
813 def revinfo(rev):
814 def revinfo(rev):
814 e = index[rev]
815 e = index[rev]
815 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
816 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
816 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
817 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
817 chainsize = 0
818 chainsize = 0
818
819
819 base = e[revlog_constants.ENTRY_DELTA_BASE]
820 base = e[revlog_constants.ENTRY_DELTA_BASE]
820 p1 = e[revlog_constants.ENTRY_PARENT_1]
821 p1 = e[revlog_constants.ENTRY_PARENT_1]
821 p2 = e[revlog_constants.ENTRY_PARENT_2]
822 p2 = e[revlog_constants.ENTRY_PARENT_2]
822
823
823 # If a parent of a revision has an empty delta, we never try to delta
824 # If a parent of a revision has an empty delta, we never try to delta
824 # against that parent, but directly against the delta base of that
825 # against that parent, but directly against the delta base of that
825 # parent (recursively). It avoids adding a useless entry in the chain.
826 # parent (recursively). It avoids adding a useless entry in the chain.
826 #
827 #
827 # However, we need to detect that as a special case for delta-type, so
828 # However, we need to detect that as a special case for delta-type, so
828 # that it is not simply reported as "other".
829 # that it is not simply reported as "other".
829 p1_base = p1
830 p1_base = p1
830 if p1 != nullrev and p1 < total_revs:
831 if p1 != nullrev and p1 < total_revs:
831 e1 = index[p1]
832 e1 = index[p1]
832 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
833 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
833 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
834 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
834 if (
835 if (
835 new_base == p1_base
836 new_base == p1_base
836 or new_base == nullrev
837 or new_base == nullrev
837 or new_base >= total_revs
838 or new_base >= total_revs
838 ):
839 ):
839 break
840 break
840 p1_base = new_base
841 p1_base = new_base
841 e1 = index[p1_base]
842 e1 = index[p1_base]
842 p2_base = p2
843 p2_base = p2
843 if p2 != nullrev and p2 < total_revs:
844 if p2 != nullrev and p2 < total_revs:
844 e2 = index[p2]
845 e2 = index[p2]
845 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
846 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
846 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
847 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
847 if (
848 if (
848 new_base == p2_base
849 new_base == p2_base
849 or new_base == nullrev
850 or new_base == nullrev
850 or new_base >= total_revs
851 or new_base >= total_revs
851 ):
852 ):
852 break
853 break
853 p2_base = new_base
854 p2_base = new_base
854 e2 = index[p2_base]
855 e2 = index[p2_base]
855
856
856 if generaldelta:
857 if generaldelta:
857 if base == p1:
858 if base == p1:
858 deltatype = b'p1'
859 deltatype = b'p1'
859 elif base == p2:
860 elif base == p2:
860 deltatype = b'p2'
861 deltatype = b'p2'
861 elif base == rev:
862 elif base == rev:
862 deltatype = b'base'
863 deltatype = b'base'
863 elif base == p1_base:
864 elif base == p1_base:
864 deltatype = b'skip1'
865 deltatype = b'skip1'
865 elif base == p2_base:
866 elif base == p2_base:
866 deltatype = b'skip2'
867 deltatype = b'skip2'
867 elif r.issnapshot(rev):
868 elif r.issnapshot(rev):
868 deltatype = b'snap'
869 deltatype = b'snap'
869 elif base == rev - 1:
870 elif base == rev - 1:
870 deltatype = b'prev'
871 deltatype = b'prev'
871 else:
872 else:
872 deltatype = b'other'
873 deltatype = b'other'
873 else:
874 else:
874 if base == rev:
875 if base == rev:
875 deltatype = b'base'
876 deltatype = b'base'
876 else:
877 else:
877 deltatype = b'prev'
878 deltatype = b'prev'
878
879
879 chain = r._deltachain(rev)[0]
880 chain = r._deltachain(rev)[0]
880 for iterrev in chain:
881 for iterrev in chain:
881 e = index[iterrev]
882 e = index[iterrev]
882 chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
883 chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
883
884
884 return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
885 return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
885
886
886 fm = ui.formatter(b'debugdeltachain', opts)
887 fm = ui.formatter(b'debugdeltachain', opts)
887
888
888 fm.plain(
889 fm.plain(
889 b' rev p1 p2 chain# chainlen prev delta '
890 b' rev p1 p2 chain# chainlen prev delta '
890 b'size rawsize chainsize ratio lindist extradist '
891 b'size rawsize chainsize ratio lindist extradist '
891 b'extraratio'
892 b'extraratio'
892 )
893 )
893 if withsparseread:
894 if withsparseread:
894 fm.plain(b' readsize largestblk rddensity srchunks')
895 fm.plain(b' readsize largestblk rddensity srchunks')
895 fm.plain(b'\n')
896 fm.plain(b'\n')
896
897
897 chainbases = {}
898 chainbases = {}
898 for rev in r:
899 for rev in r:
899 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
900 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
900 chainbase = chain[0]
901 chainbase = chain[0]
901 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
902 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
902 basestart = start(chainbase)
903 basestart = start(chainbase)
903 revstart = start(rev)
904 revstart = start(rev)
904 lineardist = revstart + comp - basestart
905 lineardist = revstart + comp - basestart
905 extradist = lineardist - chainsize
906 extradist = lineardist - chainsize
906 try:
907 try:
907 prevrev = chain[-2]
908 prevrev = chain[-2]
908 except IndexError:
909 except IndexError:
909 prevrev = -1
910 prevrev = -1
910
911
911 if uncomp != 0:
912 if uncomp != 0:
912 chainratio = float(chainsize) / float(uncomp)
913 chainratio = float(chainsize) / float(uncomp)
913 else:
914 else:
914 chainratio = chainsize
915 chainratio = chainsize
915
916
916 if chainsize != 0:
917 if chainsize != 0:
917 extraratio = float(extradist) / float(chainsize)
918 extraratio = float(extradist) / float(chainsize)
918 else:
919 else:
919 extraratio = extradist
920 extraratio = extradist
920
921
921 fm.startitem()
922 fm.startitem()
922 fm.write(
923 fm.write(
923 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
924 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
924 b'uncompsize chainsize chainratio lindist extradist '
925 b'uncompsize chainsize chainratio lindist extradist '
925 b'extraratio',
926 b'extraratio',
926 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
927 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
927 rev,
928 rev,
928 p1,
929 p1,
929 p2,
930 p2,
930 chainid,
931 chainid,
931 len(chain),
932 len(chain),
932 prevrev,
933 prevrev,
933 deltatype,
934 deltatype,
934 comp,
935 comp,
935 uncomp,
936 uncomp,
936 chainsize,
937 chainsize,
937 chainratio,
938 chainratio,
938 lineardist,
939 lineardist,
939 extradist,
940 extradist,
940 extraratio,
941 extraratio,
941 rev=rev,
942 rev=rev,
942 chainid=chainid,
943 chainid=chainid,
943 chainlen=len(chain),
944 chainlen=len(chain),
944 prevrev=prevrev,
945 prevrev=prevrev,
945 deltatype=deltatype,
946 deltatype=deltatype,
946 compsize=comp,
947 compsize=comp,
947 uncompsize=uncomp,
948 uncompsize=uncomp,
948 chainsize=chainsize,
949 chainsize=chainsize,
949 chainratio=chainratio,
950 chainratio=chainratio,
950 lindist=lineardist,
951 lindist=lineardist,
951 extradist=extradist,
952 extradist=extradist,
952 extraratio=extraratio,
953 extraratio=extraratio,
953 )
954 )
954 if withsparseread:
955 if withsparseread:
955 readsize = 0
956 readsize = 0
956 largestblock = 0
957 largestblock = 0
957 srchunks = 0
958 srchunks = 0
958
959
959 for revschunk in deltautil.slicechunk(r, chain):
960 for revschunk in deltautil.slicechunk(r, chain):
960 srchunks += 1
961 srchunks += 1
961 blkend = start(revschunk[-1]) + length(revschunk[-1])
962 blkend = start(revschunk[-1]) + length(revschunk[-1])
962 blksize = blkend - start(revschunk[0])
963 blksize = blkend - start(revschunk[0])
963
964
964 readsize += blksize
965 readsize += blksize
965 if largestblock < blksize:
966 if largestblock < blksize:
966 largestblock = blksize
967 largestblock = blksize
967
968
968 if readsize:
969 if readsize:
969 readdensity = float(chainsize) / float(readsize)
970 readdensity = float(chainsize) / float(readsize)
970 else:
971 else:
971 readdensity = 1
972 readdensity = 1
972
973
973 fm.write(
974 fm.write(
974 b'readsize largestblock readdensity srchunks',
975 b'readsize largestblock readdensity srchunks',
975 b' %10d %10d %9.5f %8d',
976 b' %10d %10d %9.5f %8d',
976 readsize,
977 readsize,
977 largestblock,
978 largestblock,
978 readdensity,
979 readdensity,
979 srchunks,
980 srchunks,
980 readsize=readsize,
981 readsize=readsize,
981 largestblock=largestblock,
982 largestblock=largestblock,
982 readdensity=readdensity,
983 readdensity=readdensity,
983 srchunks=srchunks,
984 srchunks=srchunks,
984 )
985 )
985
986
986 fm.plain(b'\n')
987 fm.plain(b'\n')
987
988
988 fm.end()
989 fm.end()
989
990
990
991
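Because debugdeltachain accepts the standard formatter options, the template keywords documented in its docstring can be used directly; a usage sketch (the flags shown are the generic revlog/formatter options, output omitted):

    $ hg debugdeltachain -m
    $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
    $ hg debugdeltachain -m -T json                   # machine-readable output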
991 @command(
992 @command(
992 b'debug-delta-find',
993 b'debug-delta-find',
993 cmdutil.debugrevlogopts + cmdutil.formatteropts,
994 cmdutil.debugrevlogopts
995 + cmdutil.formatteropts
996 + [
997 (
998 b'',
999 b'source',
1000 b'full',
1001 _(b'input data feed to the process (full, storage, p1, p2, prev)'),
1002 ),
1003 ],
994 _(b'-c|-m|FILE REV'),
1004 _(b'-c|-m|FILE REV'),
995 optionalrepo=True,
1005 optionalrepo=True,
996 )
1006 )
997 def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
1007 def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
998 """display the computation to get to a valid delta for storing REV
1008 """display the computation to get to a valid delta for storing REV
999
1009
1000 This command will replay the process used to find the "best" delta to store
1010 This command will replay the process used to find the "best" delta to store
1001 a revision and display information about all the steps used to get to that
1011 a revision and display information about all the steps used to get to that
1002 result.
1012 result.
1003
1013
1014 By default, the process is fed with the full text of the revision. This
1015 can be controlled with the --source flag.
1016
1004 The revision argument uses the revision number of the target storage (not the
1017 The revision argument uses the revision number of the target storage (not the
1005 changelog revision number).
1018 changelog revision number).
1006
1019
1007 note: the process is initiated from the full text of the revision to store.
1020 note: the process is initiated from the full text of the revision to store.
1008 """
1021 """
1009 opts = pycompat.byteskwargs(opts)
1022 opts = pycompat.byteskwargs(opts)
1010 if arg_2 is None:
1023 if arg_2 is None:
1011 file_ = None
1024 file_ = None
1012 rev = arg_1
1025 rev = arg_1
1013 else:
1026 else:
1014 file_ = arg_1
1027 file_ = arg_1
1015 rev = arg_2
1028 rev = arg_2
1016
1029
1017 rev = int(rev)
1030 rev = int(rev)
1018
1031
1019 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1032 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1020
1033
1021 deltacomputer = deltautil.deltacomputer(
1034 deltacomputer = deltautil.deltacomputer(
1022 revlog,
1035 revlog,
1023 write_debug=ui.write,
1036 write_debug=ui.write,
1024 debug_search=not ui.quiet,
1037 debug_search=not ui.quiet,
1025 )
1038 )
1026
1039
1027 node = revlog.node(rev)
1040 node = revlog.node(rev)
1028 p1r, p2r = revlog.parentrevs(rev)
1041 p1r, p2r = revlog.parentrevs(rev)
1029 p1 = revlog.node(p1r)
1042 p1 = revlog.node(p1r)
1030 p2 = revlog.node(p2r)
1043 p2 = revlog.node(p2r)
1031 btext = [revlog.revision(rev)]
1044 full_text = revlog.revision(rev)
1045 btext = [full_text]
1032 textlen = len(btext[0])
1046 textlen = len(btext[0])
1033 cachedelta = None
1047 cachedelta = None
1034 flags = revlog.flags(rev)
1048 flags = revlog.flags(rev)
1035
1049
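# Note (editorial, added for clarity): when --source is not 'full', the block
# below precomputes a delta between the chosen base revision and the full text,
# then hands it to the delta computer as a cached delta so the search starts
# from that delta rather than from the full text.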
1050 if source != b'full':
1051 if source == b'storage':
1052 base_rev = revlog.deltaparent(rev)
1053 elif source == b'p1':
1054 base_rev = p1r
1055 elif source == b'p2':
1056 base_rev = p2r
1057 elif source == b'prev':
1058 base_rev = rev - 1
1059 else:
1060 raise error.InputError(b"invalid --source value: %s" % source)
1061
1062 if base_rev != nullrev:
1063 base_text = revlog.revision(base_rev)
1064 delta = mdiff.textdiff(base_text, full_text)
1065
1066 cachedelta = (base_rev, delta)
1067 btext = [None]
1068
1036 revinfo = revlogutils.revisioninfo(
1069 revinfo = revlogutils.revisioninfo(
1037 node,
1070 node,
1038 p1,
1071 p1,
1039 p2,
1072 p2,
1040 btext,
1073 btext,
1041 textlen,
1074 textlen,
1042 cachedelta,
1075 cachedelta,
1043 flags,
1076 flags,
1044 )
1077 )
1045
1078
1046 fh = revlog._datafp()
1079 fh = revlog._datafp()
1047 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1080 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1081
1049
1082
1050 @command(
1083 @command(
1051 b'debugdirstate|debugstate',
1084 b'debugdirstate|debugstate',
1052 [
1085 [
1053 (
1086 (
1054 b'',
1087 b'',
1055 b'nodates',
1088 b'nodates',
1056 None,
1089 None,
1057 _(b'do not display the saved mtime (DEPRECATED)'),
1090 _(b'do not display the saved mtime (DEPRECATED)'),
1058 ),
1091 ),
1059 (b'', b'dates', True, _(b'display the saved mtime')),
1092 (b'', b'dates', True, _(b'display the saved mtime')),
1060 (b'', b'datesort', None, _(b'sort by saved mtime')),
1093 (b'', b'datesort', None, _(b'sort by saved mtime')),
1061 (
1094 (
1062 b'',
1095 b'',
1063 b'docket',
1096 b'docket',
1064 False,
1097 False,
1065 _(b'display the docket (metadata file) instead'),
1098 _(b'display the docket (metadata file) instead'),
1066 ),
1099 ),
1067 (
1100 (
1068 b'',
1101 b'',
1069 b'all',
1102 b'all',
1070 False,
1103 False,
1071 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1104 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1072 ),
1105 ),
1073 ],
1106 ],
1074 _(b'[OPTION]...'),
1107 _(b'[OPTION]...'),
1075 )
1108 )
1076 def debugstate(ui, repo, **opts):
1109 def debugstate(ui, repo, **opts):
1077 """show the contents of the current dirstate"""
1110 """show the contents of the current dirstate"""
1078
1111
1079 if opts.get("docket"):
1112 if opts.get("docket"):
1080 if not repo.dirstate._use_dirstate_v2:
1113 if not repo.dirstate._use_dirstate_v2:
1081 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1114 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1082
1115
1083 docket = repo.dirstate._map.docket
1116 docket = repo.dirstate._map.docket
1084 (
1117 (
1085 start_offset,
1118 start_offset,
1086 root_nodes,
1119 root_nodes,
1087 nodes_with_entry,
1120 nodes_with_entry,
1088 nodes_with_copy,
1121 nodes_with_copy,
1089 unused_bytes,
1122 unused_bytes,
1090 _unused,
1123 _unused,
1091 ignore_pattern,
1124 ignore_pattern,
1092 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1125 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1093
1126
1094 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1127 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1095 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1128 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1096 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1129 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1097 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1130 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1098 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1131 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1099 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1132 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1100 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1133 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1101 ui.write(
1134 ui.write(
1102 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1135 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1103 )
1136 )
1104 return
1137 return
1105
1138
1106 nodates = not opts['dates']
1139 nodates = not opts['dates']
1107 if opts.get('nodates') is not None:
1140 if opts.get('nodates') is not None:
1108 nodates = True
1141 nodates = True
1109 datesort = opts.get('datesort')
1142 datesort = opts.get('datesort')
1110
1143
1111 if datesort:
1144 if datesort:
1112
1145
1113 def keyfunc(entry):
1146 def keyfunc(entry):
1114 filename, _state, _mode, _size, mtime = entry
1147 filename, _state, _mode, _size, mtime = entry
1115 return (mtime, filename)
1148 return (mtime, filename)
1116
1149
1117 else:
1150 else:
1118 keyfunc = None # sort by filename
1151 keyfunc = None # sort by filename
1119 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1152 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1120 entries.sort(key=keyfunc)
1153 entries.sort(key=keyfunc)
1121 for entry in entries:
1154 for entry in entries:
1122 filename, state, mode, size, mtime = entry
1155 filename, state, mode, size, mtime = entry
1123 if mtime == -1:
1156 if mtime == -1:
1124 timestr = b'unset '
1157 timestr = b'unset '
1125 elif nodates:
1158 elif nodates:
1126 timestr = b'set '
1159 timestr = b'set '
1127 else:
1160 else:
1128 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1161 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1129 timestr = encoding.strtolocal(timestr)
1162 timestr = encoding.strtolocal(timestr)
1130 if mode & 0o20000:
1163 if mode & 0o20000:
1131 mode = b'lnk'
1164 mode = b'lnk'
1132 else:
1165 else:
1133 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1166 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1134 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1167 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1135 for f in repo.dirstate.copies():
1168 for f in repo.dirstate.copies():
1136 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1169 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137
1170
1138
1171
1139 @command(
1172 @command(
1140 b'debugdirstateignorepatternshash',
1173 b'debugdirstateignorepatternshash',
1141 [],
1174 [],
1142 _(b''),
1175 _(b''),
1143 )
1176 )
1144 def debugdirstateignorepatternshash(ui, repo, **opts):
1177 def debugdirstateignorepatternshash(ui, repo, **opts):
1145 """show the hash of ignore patterns stored in dirstate if v2,
1178 """show the hash of ignore patterns stored in dirstate if v2,
1146 or nothing for dirstate-v1
1179 or nothing for dirstate-v1
1147 """
1180 """
1148 if repo.dirstate._use_dirstate_v2:
1181 if repo.dirstate._use_dirstate_v2:
1149 docket = repo.dirstate._map.docket
1182 docket = repo.dirstate._map.docket
1150 hash_len = 20 # 160 bits for SHA-1
1183 hash_len = 20 # 160 bits for SHA-1
1151 hash_bytes = docket.tree_metadata[-hash_len:]
1184 hash_bytes = docket.tree_metadata[-hash_len:]
1152 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1185 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153
1186
1154
1187
1155 @command(
1188 @command(
1156 b'debugdiscovery',
1189 b'debugdiscovery',
1157 [
1190 [
1158 (b'', b'old', None, _(b'use old-style discovery')),
1191 (b'', b'old', None, _(b'use old-style discovery')),
1159 (
1192 (
1160 b'',
1193 b'',
1161 b'nonheads',
1194 b'nonheads',
1162 None,
1195 None,
1163 _(b'use old-style discovery with non-heads included'),
1196 _(b'use old-style discovery with non-heads included'),
1164 ),
1197 ),
1165 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1198 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1166 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1199 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1167 (
1200 (
1168 b'',
1201 b'',
1169 b'local-as-revs',
1202 b'local-as-revs',
1170 b"",
1203 b"",
1171 b'treat local as having these revisions only',
1204 b'treat local as having these revisions only',
1172 ),
1205 ),
1173 (
1206 (
1174 b'',
1207 b'',
1175 b'remote-as-revs',
1208 b'remote-as-revs',
1176 b"",
1209 b"",
1177 b'use local as remote, with only these revisions',
1210 b'use local as remote, with only these revisions',
1178 ),
1211 ),
1179 ]
1212 ]
1180 + cmdutil.remoteopts
1213 + cmdutil.remoteopts
1181 + cmdutil.formatteropts,
1214 + cmdutil.formatteropts,
1182 _(b'[--rev REV] [OTHER]'),
1215 _(b'[--rev REV] [OTHER]'),
1183 )
1216 )
1184 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1217 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1185 """runs the changeset discovery protocol in isolation
1218 """runs the changeset discovery protocol in isolation
1186
1219
1187 The local peer can be "replaced" by a subset of the local repository by
1220 The local peer can be "replaced" by a subset of the local repository by
1188 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1221 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1189 can be "replaced" by a subset of the local repository using the
1222 can be "replaced" by a subset of the local repository using the
1190 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1223 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1191 discovery situations.
1224 discovery situations.
1192
1225
1193 The following developer-oriented configs are relevant for people playing with this command:
1226 The following developer-oriented configs are relevant for people playing with this command:
1194
1227
1195 * devel.discovery.exchange-heads=True
1228 * devel.discovery.exchange-heads=True
1196
1229
1197 If False, the discovery will not start with
1230 If False, the discovery will not start with
1198 remote head fetching and local head querying.
1231 remote head fetching and local head querying.
1199
1232
1200 * devel.discovery.grow-sample=True
1233 * devel.discovery.grow-sample=True
1201
1234
1202 If False, the sample size used in set discovery will not be increased
1235 If False, the sample size used in set discovery will not be increased
1203 through the process
1236 through the process
1204
1237
1205 * devel.discovery.grow-sample.dynamic=True
1238 * devel.discovery.grow-sample.dynamic=True
1206
1239
1207 When discovery.grow-sample.dynamic is True, the default, the sample size is
1240 When discovery.grow-sample.dynamic is True, the default, the sample size is
1208 adapted to the shape of the undecided set (it is set to the max of:
1241 adapted to the shape of the undecided set (it is set to the max of:
1209 <target-size>, len(roots(undecided)), len(heads(undecided)))
1242 <target-size>, len(roots(undecided)), len(heads(undecided)))
1210
1243
1211 * devel.discovery.grow-sample.rate=1.05
1244 * devel.discovery.grow-sample.rate=1.05
1212
1245
1213 the rate at which the sample grows
1246 the rate at which the sample grows
1214
1247
1215 * devel.discovery.randomize=True
1248 * devel.discovery.randomize=True
1216
1249
1217 If False, random sampling during discovery is deterministic. It is meant for
1250 If False, random sampling during discovery is deterministic. It is meant for
1218 integration tests.
1251 integration tests.
1219
1252
1220 * devel.discovery.sample-size=200
1253 * devel.discovery.sample-size=200
1221
1254
1222 Control the initial size of the discovery sample
1255 Control the initial size of the discovery sample
1223
1256
1224 * devel.discovery.sample-size.initial=100
1257 * devel.discovery.sample-size.initial=100
1225
1258
1226 Control the size of the discovery sample used for the initial round
1259 Control the size of the discovery sample used for the initial round
1227 """
1260 """
1228 opts = pycompat.byteskwargs(opts)
1261 opts = pycompat.byteskwargs(opts)
1229 unfi = repo.unfiltered()
1262 unfi = repo.unfiltered()
1230
1263
1231 # setup potential extra filtering
1264 # setup potential extra filtering
1232 local_revs = opts[b"local_as_revs"]
1265 local_revs = opts[b"local_as_revs"]
1233 remote_revs = opts[b"remote_as_revs"]
1266 remote_revs = opts[b"remote_as_revs"]
1234
1267
1235 # make sure tests are repeatable
1268 # make sure tests are repeatable
1236 random.seed(int(opts[b'seed']))
1269 random.seed(int(opts[b'seed']))
1237
1270
1238 if not remote_revs:
1271 if not remote_revs:
1239
1272
1240 remoteurl, branches = urlutil.get_unique_pull_path(
1273 remoteurl, branches = urlutil.get_unique_pull_path(
1241 b'debugdiscovery', repo, ui, remoteurl
1274 b'debugdiscovery', repo, ui, remoteurl
1242 )
1275 )
1243 remote = hg.peer(repo, opts, remoteurl)
1276 remote = hg.peer(repo, opts, remoteurl)
1244 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1277 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1245 else:
1278 else:
1246 branches = (None, [])
1279 branches = (None, [])
1247 remote_filtered_revs = logcmdutil.revrange(
1280 remote_filtered_revs = logcmdutil.revrange(
1248 unfi, [b"not (::(%s))" % remote_revs]
1281 unfi, [b"not (::(%s))" % remote_revs]
1249 )
1282 )
1250 remote_filtered_revs = frozenset(remote_filtered_revs)
1283 remote_filtered_revs = frozenset(remote_filtered_revs)
1251
1284
1252 def remote_func(x):
1285 def remote_func(x):
1253 return remote_filtered_revs
1286 return remote_filtered_revs
1254
1287
1255 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1288 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1256
1289
1257 remote = repo.peer()
1290 remote = repo.peer()
1258 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1291 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1259
1292
1260 if local_revs:
1293 if local_revs:
1261 local_filtered_revs = logcmdutil.revrange(
1294 local_filtered_revs = logcmdutil.revrange(
1262 unfi, [b"not (::(%s))" % local_revs]
1295 unfi, [b"not (::(%s))" % local_revs]
1263 )
1296 )
1264 local_filtered_revs = frozenset(local_filtered_revs)
1297 local_filtered_revs = frozenset(local_filtered_revs)
1265
1298
1266 def local_func(x):
1299 def local_func(x):
1267 return local_filtered_revs
1300 return local_filtered_revs
1268
1301
1269 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1302 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1270 repo = repo.filtered(b'debug-discovery-local-filter')
1303 repo = repo.filtered(b'debug-discovery-local-filter')
1271
1304
1272 data = {}
1305 data = {}
1273 if opts.get(b'old'):
1306 if opts.get(b'old'):
1274
1307
1275 def doit(pushedrevs, remoteheads, remote=remote):
1308 def doit(pushedrevs, remoteheads, remote=remote):
1276 if not util.safehasattr(remote, b'branches'):
1309 if not util.safehasattr(remote, b'branches'):
1277 # enable in-client legacy support
1310 # enable in-client legacy support
1278 remote = localrepo.locallegacypeer(remote.local())
1311 remote = localrepo.locallegacypeer(remote.local())
1279 if remote_revs:
1312 if remote_revs:
1280 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1313 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1281 remote._repo = r
1314 remote._repo = r
1282 common, _in, hds = treediscovery.findcommonincoming(
1315 common, _in, hds = treediscovery.findcommonincoming(
1283 repo, remote, force=True, audit=data
1316 repo, remote, force=True, audit=data
1284 )
1317 )
1285 common = set(common)
1318 common = set(common)
1286 if not opts.get(b'nonheads'):
1319 if not opts.get(b'nonheads'):
1287 ui.writenoi18n(
1320 ui.writenoi18n(
1288 b"unpruned common: %s\n"
1321 b"unpruned common: %s\n"
1289 % b" ".join(sorted(short(n) for n in common))
1322 % b" ".join(sorted(short(n) for n in common))
1290 )
1323 )
1291
1324
1292 clnode = repo.changelog.node
1325 clnode = repo.changelog.node
1293 common = repo.revs(b'heads(::%ln)', common)
1326 common = repo.revs(b'heads(::%ln)', common)
1294 common = {clnode(r) for r in common}
1327 common = {clnode(r) for r in common}
1295 return common, hds
1328 return common, hds
1296
1329
1297 else:
1330 else:
1298
1331
1299 def doit(pushedrevs, remoteheads, remote=remote):
1332 def doit(pushedrevs, remoteheads, remote=remote):
1300 nodes = None
1333 nodes = None
1301 if pushedrevs:
1334 if pushedrevs:
1302 revs = logcmdutil.revrange(repo, pushedrevs)
1335 revs = logcmdutil.revrange(repo, pushedrevs)
1303 nodes = [repo[r].node() for r in revs]
1336 nodes = [repo[r].node() for r in revs]
1304 common, any, hds = setdiscovery.findcommonheads(
1337 common, any, hds = setdiscovery.findcommonheads(
1305 ui,
1338 ui,
1306 repo,
1339 repo,
1307 remote,
1340 remote,
1308 ancestorsof=nodes,
1341 ancestorsof=nodes,
1309 audit=data,
1342 audit=data,
1310 abortwhenunrelated=False,
1343 abortwhenunrelated=False,
1311 )
1344 )
1312 return common, hds
1345 return common, hds
1313
1346
1314 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1347 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1315 localrevs = opts[b'rev']
1348 localrevs = opts[b'rev']
1316
1349
1317 fm = ui.formatter(b'debugdiscovery', opts)
1350 fm = ui.formatter(b'debugdiscovery', opts)
1318 if fm.strict_format:
1351 if fm.strict_format:
1319
1352
1320 @contextlib.contextmanager
1353 @contextlib.contextmanager
1321 def may_capture_output():
1354 def may_capture_output():
1322 ui.pushbuffer()
1355 ui.pushbuffer()
1323 yield
1356 yield
1324 data[b'output'] = ui.popbuffer()
1357 data[b'output'] = ui.popbuffer()
1325
1358
1326 else:
1359 else:
1327 may_capture_output = util.nullcontextmanager
1360 may_capture_output = util.nullcontextmanager
1328 with may_capture_output():
1361 with may_capture_output():
1329 with util.timedcm('debug-discovery') as t:
1362 with util.timedcm('debug-discovery') as t:
1330 common, hds = doit(localrevs, remoterevs)
1363 common, hds = doit(localrevs, remoterevs)
1331
1364
1332 # compute all statistics
1365 # compute all statistics
1333 if len(common) == 1 and repo.nullid in common:
1366 if len(common) == 1 and repo.nullid in common:
1334 common = set()
1367 common = set()
1335 heads_common = set(common)
1368 heads_common = set(common)
1336 heads_remote = set(hds)
1369 heads_remote = set(hds)
1337 heads_local = set(repo.heads())
1370 heads_local = set(repo.heads())
1338 # note: there cannot be a local or remote head that is in common and not
1371 # note: there cannot be a local or remote head that is in common and not
1339 # itself a head of common.
1372 # itself a head of common.
1340 heads_common_local = heads_common & heads_local
1373 heads_common_local = heads_common & heads_local
1341 heads_common_remote = heads_common & heads_remote
1374 heads_common_remote = heads_common & heads_remote
1342 heads_common_both = heads_common & heads_remote & heads_local
1375 heads_common_both = heads_common & heads_remote & heads_local
1343
1376
1344 all = repo.revs(b'all()')
1377 all = repo.revs(b'all()')
1345 common = repo.revs(b'::%ln', common)
1378 common = repo.revs(b'::%ln', common)
1346 roots_common = repo.revs(b'roots(::%ld)', common)
1379 roots_common = repo.revs(b'roots(::%ld)', common)
1347 missing = repo.revs(b'not ::%ld', common)
1380 missing = repo.revs(b'not ::%ld', common)
1348 heads_missing = repo.revs(b'heads(%ld)', missing)
1381 heads_missing = repo.revs(b'heads(%ld)', missing)
1349 roots_missing = repo.revs(b'roots(%ld)', missing)
1382 roots_missing = repo.revs(b'roots(%ld)', missing)
1350 assert len(common) + len(missing) == len(all)
1383 assert len(common) + len(missing) == len(all)
1351
1384
1352 initial_undecided = repo.revs(
1385 initial_undecided = repo.revs(
1353 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1386 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1354 )
1387 )
1355 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1388 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1356 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1389 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1357 common_initial_undecided = initial_undecided & common
1390 common_initial_undecided = initial_undecided & common
1358 missing_initial_undecided = initial_undecided & missing
1391 missing_initial_undecided = initial_undecided & missing
1359
1392
1360 data[b'elapsed'] = t.elapsed
1393 data[b'elapsed'] = t.elapsed
1361 data[b'nb-common-heads'] = len(heads_common)
1394 data[b'nb-common-heads'] = len(heads_common)
1362 data[b'nb-common-heads-local'] = len(heads_common_local)
1395 data[b'nb-common-heads-local'] = len(heads_common_local)
1363 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1396 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1364 data[b'nb-common-heads-both'] = len(heads_common_both)
1397 data[b'nb-common-heads-both'] = len(heads_common_both)
1365 data[b'nb-common-roots'] = len(roots_common)
1398 data[b'nb-common-roots'] = len(roots_common)
1366 data[b'nb-head-local'] = len(heads_local)
1399 data[b'nb-head-local'] = len(heads_local)
1367 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1400 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1368 data[b'nb-head-remote'] = len(heads_remote)
1401 data[b'nb-head-remote'] = len(heads_remote)
1369 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1402 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1370 heads_common_remote
1403 heads_common_remote
1371 )
1404 )
1372 data[b'nb-revs'] = len(all)
1405 data[b'nb-revs'] = len(all)
1373 data[b'nb-revs-common'] = len(common)
1406 data[b'nb-revs-common'] = len(common)
1374 data[b'nb-revs-missing'] = len(missing)
1407 data[b'nb-revs-missing'] = len(missing)
1375 data[b'nb-missing-heads'] = len(heads_missing)
1408 data[b'nb-missing-heads'] = len(heads_missing)
1376 data[b'nb-missing-roots'] = len(roots_missing)
1409 data[b'nb-missing-roots'] = len(roots_missing)
1377 data[b'nb-ini_und'] = len(initial_undecided)
1410 data[b'nb-ini_und'] = len(initial_undecided)
1378 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1411 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1379 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1412 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1380 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1413 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1381 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1414 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1382
1415
1383 fm.startitem()
1416 fm.startitem()
1384 fm.data(**pycompat.strkwargs(data))
1417 fm.data(**pycompat.strkwargs(data))
1385 # display discovery summary
1418 # display discovery summary
1386 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1419 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1387 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1420 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1388 if b'total-round-trips-heads' in data:
1421 if b'total-round-trips-heads' in data:
1389 fm.plain(
1422 fm.plain(
1390 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1423 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1391 )
1424 )
1392 if b'total-round-trips-branches' in data:
1425 if b'total-round-trips-branches' in data:
1393 fm.plain(
1426 fm.plain(
1394 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1427 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1395 % data
1428 % data
1396 )
1429 )
1397 if b'total-round-trips-between' in data:
1430 if b'total-round-trips-between' in data:
1398 fm.plain(
1431 fm.plain(
1399 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1432 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1400 )
1433 )
1401 fm.plain(b"queries: %(total-queries)9d\n" % data)
1434 fm.plain(b"queries: %(total-queries)9d\n" % data)
1402 if b'total-queries-branches' in data:
1435 if b'total-queries-branches' in data:
1403 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1436 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1404 if b'total-queries-between' in data:
1437 if b'total-queries-between' in data:
1405 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1438 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1406 fm.plain(b"heads summary:\n")
1439 fm.plain(b"heads summary:\n")
1407 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1440 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1408 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1441 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1409 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1442 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1410 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1443 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1411 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1444 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1412 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1445 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1413 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1446 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1414 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1447 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1415 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1448 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1416 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1449 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1417 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1450 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1418 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1451 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1419 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1452 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1420 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1453 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1421 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1454 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1422 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1455 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1423 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1456 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1424 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1457 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1425 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1458 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1426 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1459 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1427 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1460 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1428 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1461 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1429
1462
1430 if ui.verbose:
1463 if ui.verbose:
1431 fm.plain(
1464 fm.plain(
1432 b"common heads: %s\n"
1465 b"common heads: %s\n"
1433 % b" ".join(sorted(short(n) for n in heads_common))
1466 % b" ".join(sorted(short(n) for n in heads_common))
1434 )
1467 )
1435 fm.end()
1468 fm.end()
1436
1469
1437
1470
1438 _chunksize = 4 << 10
1471 _chunksize = 4 << 10
1439
1472
1440
1473
1441 @command(
1474 @command(
1442 b'debugdownload',
1475 b'debugdownload',
1443 [
1476 [
1444 (b'o', b'output', b'', _(b'path')),
1477 (b'o', b'output', b'', _(b'path')),
1445 ],
1478 ],
1446 optionalrepo=True,
1479 optionalrepo=True,
1447 )
1480 )
1448 def debugdownload(ui, repo, url, output=None, **opts):
1481 def debugdownload(ui, repo, url, output=None, **opts):
1449 """download a resource using Mercurial logic and config"""
1482 """download a resource using Mercurial logic and config"""
1450 fh = urlmod.open(ui, url, output)
1483 fh = urlmod.open(ui, url, output)
1451
1484
1452 dest = ui
1485 dest = ui
1453 if output:
1486 if output:
1454 dest = open(output, b"wb", _chunksize)
1487 dest = open(output, b"wb", _chunksize)
1455 try:
1488 try:
1456 data = fh.read(_chunksize)
1489 data = fh.read(_chunksize)
1457 while data:
1490 while data:
1458 dest.write(data)
1491 dest.write(data)
1459 data = fh.read(_chunksize)
1492 data = fh.read(_chunksize)
1460 finally:
1493 finally:
1461 if output:
1494 if output:
1462 dest.close()
1495 dest.close()
1463
1496
1464
1497
1465 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1498 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1466 def debugextensions(ui, repo, **opts):
1499 def debugextensions(ui, repo, **opts):
1467 '''show information about active extensions'''
1500 '''show information about active extensions'''
1468 opts = pycompat.byteskwargs(opts)
1501 opts = pycompat.byteskwargs(opts)
1469 exts = extensions.extensions(ui)
1502 exts = extensions.extensions(ui)
1470 hgver = util.version()
1503 hgver = util.version()
1471 fm = ui.formatter(b'debugextensions', opts)
1504 fm = ui.formatter(b'debugextensions', opts)
1472 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1505 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1473 isinternal = extensions.ismoduleinternal(extmod)
1506 isinternal = extensions.ismoduleinternal(extmod)
1474 extsource = None
1507 extsource = None
1475
1508
1476 if util.safehasattr(extmod, '__file__'):
1509 if util.safehasattr(extmod, '__file__'):
1477 extsource = pycompat.fsencode(extmod.__file__)
1510 extsource = pycompat.fsencode(extmod.__file__)
1478 elif getattr(sys, 'oxidized', False):
1511 elif getattr(sys, 'oxidized', False):
1479 extsource = pycompat.sysexecutable
1512 extsource = pycompat.sysexecutable
1480 if isinternal:
1513 if isinternal:
1481 exttestedwith = [] # never expose magic string to users
1514 exttestedwith = [] # never expose magic string to users
1482 else:
1515 else:
1483 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1516 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1484 extbuglink = getattr(extmod, 'buglink', None)
1517 extbuglink = getattr(extmod, 'buglink', None)
1485
1518
1486 fm.startitem()
1519 fm.startitem()
1487
1520
1488 if ui.quiet or ui.verbose:
1521 if ui.quiet or ui.verbose:
1489 fm.write(b'name', b'%s\n', extname)
1522 fm.write(b'name', b'%s\n', extname)
1490 else:
1523 else:
1491 fm.write(b'name', b'%s', extname)
1524 fm.write(b'name', b'%s', extname)
1492 if isinternal or hgver in exttestedwith:
1525 if isinternal or hgver in exttestedwith:
1493 fm.plain(b'\n')
1526 fm.plain(b'\n')
1494 elif not exttestedwith:
1527 elif not exttestedwith:
1495 fm.plain(_(b' (untested!)\n'))
1528 fm.plain(_(b' (untested!)\n'))
1496 else:
1529 else:
1497 lasttestedversion = exttestedwith[-1]
1530 lasttestedversion = exttestedwith[-1]
1498 fm.plain(b' (%s!)\n' % lasttestedversion)
1531 fm.plain(b' (%s!)\n' % lasttestedversion)
1499
1532
1500 fm.condwrite(
1533 fm.condwrite(
1501 ui.verbose and extsource,
1534 ui.verbose and extsource,
1502 b'source',
1535 b'source',
1503 _(b' location: %s\n'),
1536 _(b' location: %s\n'),
1504 extsource or b"",
1537 extsource or b"",
1505 )
1538 )
1506
1539
1507 if ui.verbose:
1540 if ui.verbose:
1508 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1541 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1509 fm.data(bundled=isinternal)
1542 fm.data(bundled=isinternal)
1510
1543
1511 fm.condwrite(
1544 fm.condwrite(
1512 ui.verbose and exttestedwith,
1545 ui.verbose and exttestedwith,
1513 b'testedwith',
1546 b'testedwith',
1514 _(b' tested with: %s\n'),
1547 _(b' tested with: %s\n'),
1515 fm.formatlist(exttestedwith, name=b'ver'),
1548 fm.formatlist(exttestedwith, name=b'ver'),
1516 )
1549 )
1517
1550
1518 fm.condwrite(
1551 fm.condwrite(
1519 ui.verbose and extbuglink,
1552 ui.verbose and extbuglink,
1520 b'buglink',
1553 b'buglink',
1521 _(b' bug reporting: %s\n'),
1554 _(b' bug reporting: %s\n'),
1522 extbuglink or b"",
1555 extbuglink or b"",
1523 )
1556 )
1524
1557
1525 fm.end()
1558 fm.end()
1526
1559
1527
1560
1528 @command(
1561 @command(
1529 b'debugfileset',
1562 b'debugfileset',
1530 [
1563 [
1531 (
1564 (
1532 b'r',
1565 b'r',
1533 b'rev',
1566 b'rev',
1534 b'',
1567 b'',
1535 _(b'apply the filespec on this revision'),
1568 _(b'apply the filespec on this revision'),
1536 _(b'REV'),
1569 _(b'REV'),
1537 ),
1570 ),
1538 (
1571 (
1539 b'',
1572 b'',
1540 b'all-files',
1573 b'all-files',
1541 False,
1574 False,
1542 _(b'test files from all revisions and working directory'),
1575 _(b'test files from all revisions and working directory'),
1543 ),
1576 ),
1544 (
1577 (
1545 b's',
1578 b's',
1546 b'show-matcher',
1579 b'show-matcher',
1547 None,
1580 None,
1548 _(b'print internal representation of matcher'),
1581 _(b'print internal representation of matcher'),
1549 ),
1582 ),
1550 (
1583 (
1551 b'p',
1584 b'p',
1552 b'show-stage',
1585 b'show-stage',
1553 [],
1586 [],
1554 _(b'print parsed tree at the given stage'),
1587 _(b'print parsed tree at the given stage'),
1555 _(b'NAME'),
1588 _(b'NAME'),
1556 ),
1589 ),
1557 ],
1590 ],
1558 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1591 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1559 )
1592 )
1560 def debugfileset(ui, repo, expr, **opts):
1593 def debugfileset(ui, repo, expr, **opts):
1561 '''parse and apply a fileset specification'''
1594 '''parse and apply a fileset specification'''
1562 from . import fileset
1595 from . import fileset
1563
1596
1564 fileset.symbols # force import of fileset so we have predicates to optimize
1597 fileset.symbols # force import of fileset so we have predicates to optimize
1565 opts = pycompat.byteskwargs(opts)
1598 opts = pycompat.byteskwargs(opts)
1566 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1599 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1567
1600
1568 stages = [
1601 stages = [
1569 (b'parsed', pycompat.identity),
1602 (b'parsed', pycompat.identity),
1570 (b'analyzed', filesetlang.analyze),
1603 (b'analyzed', filesetlang.analyze),
1571 (b'optimized', filesetlang.optimize),
1604 (b'optimized', filesetlang.optimize),
1572 ]
1605 ]
1573 stagenames = {n for n, f in stages}
1606 stagenames = {n for n, f in stages}
1574
1607
1575 showalways = set()
1608 showalways = set()
1576 if ui.verbose and not opts[b'show_stage']:
1609 if ui.verbose and not opts[b'show_stage']:
1577 # show parsed tree by --verbose (deprecated)
1610 # show parsed tree by --verbose (deprecated)
1578 showalways.add(b'parsed')
1611 showalways.add(b'parsed')
1579 if opts[b'show_stage'] == [b'all']:
1612 if opts[b'show_stage'] == [b'all']:
1580 showalways.update(stagenames)
1613 showalways.update(stagenames)
1581 else:
1614 else:
1582 for n in opts[b'show_stage']:
1615 for n in opts[b'show_stage']:
1583 if n not in stagenames:
1616 if n not in stagenames:
1584 raise error.Abort(_(b'invalid stage name: %s') % n)
1617 raise error.Abort(_(b'invalid stage name: %s') % n)
1585 showalways.update(opts[b'show_stage'])
1618 showalways.update(opts[b'show_stage'])
1586
1619
1587 tree = filesetlang.parse(expr)
1620 tree = filesetlang.parse(expr)
1588 for n, f in stages:
1621 for n, f in stages:
1589 tree = f(tree)
1622 tree = f(tree)
1590 if n in showalways:
1623 if n in showalways:
1591 if opts[b'show_stage'] or n != b'parsed':
1624 if opts[b'show_stage'] or n != b'parsed':
1592 ui.write(b"* %s:\n" % n)
1625 ui.write(b"* %s:\n" % n)
1593 ui.write(filesetlang.prettyformat(tree), b"\n")
1626 ui.write(filesetlang.prettyformat(tree), b"\n")
1594
1627
1595 files = set()
1628 files = set()
1596 if opts[b'all_files']:
1629 if opts[b'all_files']:
1597 for r in repo:
1630 for r in repo:
1598 c = repo[r]
1631 c = repo[r]
1599 files.update(c.files())
1632 files.update(c.files())
1600 files.update(c.substate)
1633 files.update(c.substate)
1601 if opts[b'all_files'] or ctx.rev() is None:
1634 if opts[b'all_files'] or ctx.rev() is None:
1602 wctx = repo[None]
1635 wctx = repo[None]
1603 files.update(
1636 files.update(
1604 repo.dirstate.walk(
1637 repo.dirstate.walk(
1605 scmutil.matchall(repo),
1638 scmutil.matchall(repo),
1606 subrepos=list(wctx.substate),
1639 subrepos=list(wctx.substate),
1607 unknown=True,
1640 unknown=True,
1608 ignored=True,
1641 ignored=True,
1609 )
1642 )
1610 )
1643 )
1611 files.update(wctx.substate)
1644 files.update(wctx.substate)
1612 else:
1645 else:
1613 files.update(ctx.files())
1646 files.update(ctx.files())
1614 files.update(ctx.substate)
1647 files.update(ctx.substate)
1615
1648
1616 m = ctx.matchfileset(repo.getcwd(), expr)
1649 m = ctx.matchfileset(repo.getcwd(), expr)
1617 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1650 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1618 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1651 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1619 for f in sorted(files):
1652 for f in sorted(files):
1620 if not m(f):
1653 if not m(f):
1621 continue
1654 continue
1622 ui.write(b"%s\n" % f)
1655 ui.write(b"%s\n" % f)
1623
1656
1624
1657
1625 @command(
1658 @command(
1626 b"debug-repair-issue6528",
1659 b"debug-repair-issue6528",
1627 [
1660 [
1628 (
1661 (
1629 b'',
1662 b'',
1630 b'to-report',
1663 b'to-report',
1631 b'',
1664 b'',
1632 _(b'build a report of affected revisions to this file'),
1665 _(b'build a report of affected revisions to this file'),
1633 _(b'FILE'),
1666 _(b'FILE'),
1634 ),
1667 ),
1635 (
1668 (
1636 b'',
1669 b'',
1637 b'from-report',
1670 b'from-report',
1638 b'',
1671 b'',
1639 _(b'repair revisions listed in this report file'),
1672 _(b'repair revisions listed in this report file'),
1640 _(b'FILE'),
1673 _(b'FILE'),
1641 ),
1674 ),
1642 (
1675 (
1643 b'',
1676 b'',
1644 b'paranoid',
1677 b'paranoid',
1645 False,
1678 False,
1646 _(b'check that both detection methods do the same thing'),
1679 _(b'check that both detection methods do the same thing'),
1647 ),
1680 ),
1648 ]
1681 ]
1649 + cmdutil.dryrunopts,
1682 + cmdutil.dryrunopts,
1650 )
1683 )
1651 def debug_repair_issue6528(ui, repo, **opts):
1684 def debug_repair_issue6528(ui, repo, **opts):
1652 """find affected revisions and repair them. See issue6528 for more details.
1685 """find affected revisions and repair them. See issue6528 for more details.
1653
1686
1654 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1687 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1655 computation of affected revisions for a given repository across clones.
1688 computation of affected revisions for a given repository across clones.
1656 The report format is line-based (with empty lines ignored):
1689 The report format is line-based (with empty lines ignored):
1657
1690
1658 ```
1691 ```
1659 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1692 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1660 ```
1693 ```
1661
1694
1662 There can be multiple broken revisions per filelog, they are separated by
1695 There can be multiple broken revisions per filelog, they are separated by
1663 a comma with no spaces. The only space is between the revision(s) and the
1696 a comma with no spaces. The only space is between the revision(s) and the
1664 filename.
1697 filename.
1665
1698
1666 Note that this does *not* mean that future affected revisions are repaired;
1699 Note that this does *not* mean that future affected revisions are repaired;
1667 that needs a separate fix at the exchange level, which was introduced in
1700 that needs a separate fix at the exchange level, which was introduced in
1668 Mercurial 5.9.1.
1701 Mercurial 5.9.1.
1669
1702
1670 There is a `--paranoid` flag to test that the fast implementation is correct
1703 There is a `--paranoid` flag to test that the fast implementation is correct
1671 by checking it against the slow implementation. Since this matter is quite
1704 by checking it against the slow implementation. Since this matter is quite
1672 urgent and testing every edge-case is probably quite costly, we use this
1705 urgent and testing every edge-case is probably quite costly, we use this
1673 method to test on large repositories as a fuzzing method of sorts.
1706 method to test on large repositories as a fuzzing method of sorts.
1674 """
1707 """
1675 cmdutil.check_incompatible_arguments(
1708 cmdutil.check_incompatible_arguments(
1676 opts, 'to_report', ['from_report', 'dry_run']
1709 opts, 'to_report', ['from_report', 'dry_run']
1677 )
1710 )
1678 dry_run = opts.get('dry_run')
1711 dry_run = opts.get('dry_run')
1679 to_report = opts.get('to_report')
1712 to_report = opts.get('to_report')
1680 from_report = opts.get('from_report')
1713 from_report = opts.get('from_report')
1681 paranoid = opts.get('paranoid')
1714 paranoid = opts.get('paranoid')
1682 # TODO maybe add filelog pattern and revision pattern parameters to help
1715 # TODO maybe add filelog pattern and revision pattern parameters to help
1683 # narrow down the search for users that know what they're looking for?
1716 # narrow down the search for users that know what they're looking for?
1684
1717
1685 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1718 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1686 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1719 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1687 raise error.Abort(_(msg))
1720 raise error.Abort(_(msg))
1688
1721
1689 rewrite.repair_issue6528(
1722 rewrite.repair_issue6528(
1690 ui,
1723 ui,
1691 repo,
1724 repo,
1692 dry_run=dry_run,
1725 dry_run=dry_run,
1693 to_report=to_report,
1726 to_report=to_report,
1694 from_report=from_report,
1727 from_report=from_report,
1695 paranoid=paranoid,
1728 paranoid=paranoid,
1696 )
1729 )
1697
1730
1698
1731
1699 @command(b'debugformat', [] + cmdutil.formatteropts)
1732 @command(b'debugformat', [] + cmdutil.formatteropts)
1700 def debugformat(ui, repo, **opts):
1733 def debugformat(ui, repo, **opts):
1701 """display format information about the current repository
1734 """display format information about the current repository
1702
1735
1703 Use --verbose to get extra information about the current config value and
1736 Use --verbose to get extra information about the current config value and
1704 the Mercurial default."""
1737 the Mercurial default."""
1705 opts = pycompat.byteskwargs(opts)
1738 opts = pycompat.byteskwargs(opts)
1706 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1739 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1707 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1740 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1708
1741
1709 def makeformatname(name):
1742 def makeformatname(name):
1710 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1743 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1711
1744
1712 fm = ui.formatter(b'debugformat', opts)
1745 fm = ui.formatter(b'debugformat', opts)
1713 if fm.isplain():
1746 if fm.isplain():
1714
1747
1715 def formatvalue(value):
1748 def formatvalue(value):
1716 if util.safehasattr(value, b'startswith'):
1749 if util.safehasattr(value, b'startswith'):
1717 return value
1750 return value
1718 if value:
1751 if value:
1719 return b'yes'
1752 return b'yes'
1720 else:
1753 else:
1721 return b'no'
1754 return b'no'
1722
1755
1723 else:
1756 else:
1724 formatvalue = pycompat.identity
1757 formatvalue = pycompat.identity
1725
1758
1726 fm.plain(b'format-variant')
1759 fm.plain(b'format-variant')
1727 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1760 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1728 fm.plain(b' repo')
1761 fm.plain(b' repo')
1729 if ui.verbose:
1762 if ui.verbose:
1730 fm.plain(b' config default')
1763 fm.plain(b' config default')
1731 fm.plain(b'\n')
1764 fm.plain(b'\n')
1732 for fv in upgrade.allformatvariant:
1765 for fv in upgrade.allformatvariant:
1733 fm.startitem()
1766 fm.startitem()
1734 repovalue = fv.fromrepo(repo)
1767 repovalue = fv.fromrepo(repo)
1735 configvalue = fv.fromconfig(repo)
1768 configvalue = fv.fromconfig(repo)
1736
1769
1737 if repovalue != configvalue:
1770 if repovalue != configvalue:
1738 namelabel = b'formatvariant.name.mismatchconfig'
1771 namelabel = b'formatvariant.name.mismatchconfig'
1739 repolabel = b'formatvariant.repo.mismatchconfig'
1772 repolabel = b'formatvariant.repo.mismatchconfig'
1740 elif repovalue != fv.default:
1773 elif repovalue != fv.default:
1741 namelabel = b'formatvariant.name.mismatchdefault'
1774 namelabel = b'formatvariant.name.mismatchdefault'
1742 repolabel = b'formatvariant.repo.mismatchdefault'
1775 repolabel = b'formatvariant.repo.mismatchdefault'
1743 else:
1776 else:
1744 namelabel = b'formatvariant.name.uptodate'
1777 namelabel = b'formatvariant.name.uptodate'
1745 repolabel = b'formatvariant.repo.uptodate'
1778 repolabel = b'formatvariant.repo.uptodate'
1746
1779
1747 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1780 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1748 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1781 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1749 if fv.default != configvalue:
1782 if fv.default != configvalue:
1750 configlabel = b'formatvariant.config.special'
1783 configlabel = b'formatvariant.config.special'
1751 else:
1784 else:
1752 configlabel = b'formatvariant.config.default'
1785 configlabel = b'formatvariant.config.default'
1753 fm.condwrite(
1786 fm.condwrite(
1754 ui.verbose,
1787 ui.verbose,
1755 b'config',
1788 b'config',
1756 b' %6s',
1789 b' %6s',
1757 formatvalue(configvalue),
1790 formatvalue(configvalue),
1758 label=configlabel,
1791 label=configlabel,
1759 )
1792 )
1760 fm.condwrite(
1793 fm.condwrite(
1761 ui.verbose,
1794 ui.verbose,
1762 b'default',
1795 b'default',
1763 b' %7s',
1796 b' %7s',
1764 formatvalue(fv.default),
1797 formatvalue(fv.default),
1765 label=b'formatvariant.default',
1798 label=b'formatvariant.default',
1766 )
1799 )
1767 fm.plain(b'\n')
1800 fm.plain(b'\n')
1768 fm.end()
1801 fm.end()
1769
1802
1770
1803
1771 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1804 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1772 def debugfsinfo(ui, path=b"."):
1805 def debugfsinfo(ui, path=b"."):
1773 """show information detected about current filesystem"""
1806 """show information detected about current filesystem"""
1774 ui.writenoi18n(b'path: %s\n' % path)
1807 ui.writenoi18n(b'path: %s\n' % path)
1775 ui.writenoi18n(
1808 ui.writenoi18n(
1776 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1809 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1777 )
1810 )
1778 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1811 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1779 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1812 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1780 ui.writenoi18n(
1813 ui.writenoi18n(
1781 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1814 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1782 )
1815 )
1783 ui.writenoi18n(
1816 ui.writenoi18n(
1784 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1817 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1785 )
1818 )
1786 casesensitive = b'(unknown)'
1819 casesensitive = b'(unknown)'
1787 try:
1820 try:
1788 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1821 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1789 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1822 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1790 except OSError:
1823 except OSError:
1791 pass
1824 pass
1792 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1825 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1793
1826
1794
1827
1795 @command(
1828 @command(
1796 b'debuggetbundle',
1829 b'debuggetbundle',
1797 [
1830 [
1798 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1831 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1799 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1832 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1800 (
1833 (
1801 b't',
1834 b't',
1802 b'type',
1835 b'type',
1803 b'bzip2',
1836 b'bzip2',
1804 _(b'bundle compression type to use'),
1837 _(b'bundle compression type to use'),
1805 _(b'TYPE'),
1838 _(b'TYPE'),
1806 ),
1839 ),
1807 ],
1840 ],
1808 _(b'REPO FILE [-H|-C ID]...'),
1841 _(b'REPO FILE [-H|-C ID]...'),
1809 norepo=True,
1842 norepo=True,
1810 )
1843 )
1811 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1844 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1812 """retrieves a bundle from a repo
1845 """retrieves a bundle from a repo
1813
1846
1814 Every ID must be a full-length hex node id string. Saves the bundle to the
1847 Every ID must be a full-length hex node id string. Saves the bundle to the
1815 given file.
1848 given file.
1816 """
1849 """
1817 opts = pycompat.byteskwargs(opts)
1850 opts = pycompat.byteskwargs(opts)
1818 repo = hg.peer(ui, opts, repopath)
1851 repo = hg.peer(ui, opts, repopath)
1819 if not repo.capable(b'getbundle'):
1852 if not repo.capable(b'getbundle'):
1820 raise error.Abort(b"getbundle() not supported by target repository")
1853 raise error.Abort(b"getbundle() not supported by target repository")
1821 args = {}
1854 args = {}
1822 if common:
1855 if common:
1823 args['common'] = [bin(s) for s in common]
1856 args['common'] = [bin(s) for s in common]
1824 if head:
1857 if head:
1825 args['heads'] = [bin(s) for s in head]
1858 args['heads'] = [bin(s) for s in head]
1826 # TODO: get desired bundlecaps from command line.
1859 # TODO: get desired bundlecaps from command line.
1827 args['bundlecaps'] = None
1860 args['bundlecaps'] = None
1828 bundle = repo.getbundle(b'debug', **args)
1861 bundle = repo.getbundle(b'debug', **args)
1829
1862
1830 bundletype = opts.get(b'type', b'bzip2').lower()
1863 bundletype = opts.get(b'type', b'bzip2').lower()
1831 btypes = {
1864 btypes = {
1832 b'none': b'HG10UN',
1865 b'none': b'HG10UN',
1833 b'bzip2': b'HG10BZ',
1866 b'bzip2': b'HG10BZ',
1834 b'gzip': b'HG10GZ',
1867 b'gzip': b'HG10GZ',
1835 b'bundle2': b'HG20',
1868 b'bundle2': b'HG20',
1836 }
1869 }
1837 bundletype = btypes.get(bundletype)
1870 bundletype = btypes.get(bundletype)
1838 if bundletype not in bundle2.bundletypes:
1871 if bundletype not in bundle2.bundletypes:
1839 raise error.Abort(_(b'unknown bundle type specified with --type'))
1872 raise error.Abort(_(b'unknown bundle type specified with --type'))
1840 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1873 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1841
1874
1842
1875
1843 @command(b'debugignore', [], b'[FILE]')
1876 @command(b'debugignore', [], b'[FILE]')
1844 def debugignore(ui, repo, *files, **opts):
1877 def debugignore(ui, repo, *files, **opts):
1845 """display the combined ignore pattern and information about ignored files
1878 """display the combined ignore pattern and information about ignored files
1846
1879
1847 With no argument display the combined ignore pattern.
1880 With no argument display the combined ignore pattern.
1848
1881
1849 Given space-separated file names, show whether each given file is ignored
1882 Given space-separated file names, show whether each given file is ignored
1850 and, if so, the ignore rule (file and line number) that matched it.
1883 and, if so, the ignore rule (file and line number) that matched it.
1851 """
1884 """
1852 ignore = repo.dirstate._ignore
1885 ignore = repo.dirstate._ignore
1853 if not files:
1886 if not files:
1854 # Show all the patterns
1887 # Show all the patterns
1855 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1888 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1856 else:
1889 else:
1857 m = scmutil.match(repo[None], pats=files)
1890 m = scmutil.match(repo[None], pats=files)
1858 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1891 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1859 for f in m.files():
1892 for f in m.files():
1860 nf = util.normpath(f)
1893 nf = util.normpath(f)
1861 ignored = None
1894 ignored = None
1862 ignoredata = None
1895 ignoredata = None
1863 if nf != b'.':
1896 if nf != b'.':
1864 if ignore(nf):
1897 if ignore(nf):
1865 ignored = nf
1898 ignored = nf
1866 ignoredata = repo.dirstate._ignorefileandline(nf)
1899 ignoredata = repo.dirstate._ignorefileandline(nf)
1867 else:
1900 else:
1868 for p in pathutil.finddirs(nf):
1901 for p in pathutil.finddirs(nf):
1869 if ignore(p):
1902 if ignore(p):
1870 ignored = p
1903 ignored = p
1871 ignoredata = repo.dirstate._ignorefileandline(p)
1904 ignoredata = repo.dirstate._ignorefileandline(p)
1872 break
1905 break
1873 if ignored:
1906 if ignored:
1874 if ignored == nf:
1907 if ignored == nf:
1875 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1908 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1876 else:
1909 else:
1877 ui.write(
1910 ui.write(
1878 _(
1911 _(
1879 b"%s is ignored because of "
1912 b"%s is ignored because of "
1880 b"containing directory %s\n"
1913 b"containing directory %s\n"
1881 )
1914 )
1882 % (uipathfn(f), ignored)
1915 % (uipathfn(f), ignored)
1883 )
1916 )
1884 ignorefile, lineno, line = ignoredata
1917 ignorefile, lineno, line = ignoredata
1885 ui.write(
1918 ui.write(
1886 _(b"(ignore rule in %s, line %d: '%s')\n")
1919 _(b"(ignore rule in %s, line %d: '%s')\n")
1887 % (ignorefile, lineno, line)
1920 % (ignorefile, lineno, line)
1888 )
1921 )
1889 else:
1922 else:
1890 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1923 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1891
1924
1892
1925
1893 @command(
1926 @command(
1894 b'debug-revlog-index|debugindex',
1927 b'debug-revlog-index|debugindex',
1895 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1928 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1896 _(b'-c|-m|FILE'),
1929 _(b'-c|-m|FILE'),
1897 )
1930 )
1898 def debugindex(ui, repo, file_=None, **opts):
1931 def debugindex(ui, repo, file_=None, **opts):
1899 """dump index data for a revlog"""
1932 """dump index data for a revlog"""
1900 opts = pycompat.byteskwargs(opts)
1933 opts = pycompat.byteskwargs(opts)
1901 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1934 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1902
1935
1903 fm = ui.formatter(b'debugindex', opts)
1936 fm = ui.formatter(b'debugindex', opts)
1904
1937
1905 revlog = getattr(store, b'_revlog', store)
1938 revlog = getattr(store, b'_revlog', store)
1906
1939
1907 return revlog_debug.debug_index(
1940 return revlog_debug.debug_index(
1908 ui,
1941 ui,
1909 repo,
1942 repo,
1910 formatter=fm,
1943 formatter=fm,
1911 revlog=revlog,
1944 revlog=revlog,
1912 full_node=ui.debugflag,
1945 full_node=ui.debugflag,
1913 )
1946 )
1914
1947
1915
1948
1916 @command(
1949 @command(
1917 b'debugindexdot',
1950 b'debugindexdot',
1918 cmdutil.debugrevlogopts,
1951 cmdutil.debugrevlogopts,
1919 _(b'-c|-m|FILE'),
1952 _(b'-c|-m|FILE'),
1920 optionalrepo=True,
1953 optionalrepo=True,
1921 )
1954 )
1922 def debugindexdot(ui, repo, file_=None, **opts):
1955 def debugindexdot(ui, repo, file_=None, **opts):
1923 """dump an index DAG as a graphviz dot file"""
1956 """dump an index DAG as a graphviz dot file"""
1924 opts = pycompat.byteskwargs(opts)
1957 opts = pycompat.byteskwargs(opts)
1925 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1958 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1926 ui.writenoi18n(b"digraph G {\n")
1959 ui.writenoi18n(b"digraph G {\n")
1927 for i in r:
1960 for i in r:
1928 node = r.node(i)
1961 node = r.node(i)
1929 pp = r.parents(node)
1962 pp = r.parents(node)
1930 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1963 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1931 if pp[1] != repo.nullid:
1964 if pp[1] != repo.nullid:
1932 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1965 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1933 ui.write(b"}\n")
1966 ui.write(b"}\n")
1934
1967
1935
1968
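# Illustrative sketch: essentially the conversion debugindexdot performs above,
# but over a plain parent table so it runs standalone; a parent revision of -1
# stands in for the null revision and is skipped for both parents here.
def _sketch_dag_to_dot(parentrevs):
    """Render a list of (p1rev, p2rev) tuples, one per revision, as dot."""
    lines = ["digraph G {"]
    for rev, (p1, p2) in enumerate(parentrevs):
        if p1 != -1:
            lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)


# Example: _sketch_dag_to_dot([(-1, -1), (0, -1), (1, 0)]) describes a root,
# a child of that root, and a merge of both, yielding edges 0 -> 1, 1 -> 2
# and 0 -> 2. The real command's output can be rendered the same way, e.g.
# hg debugindexdot -c | dot -Tsvg > dag.svg (assuming Graphviz is installed).
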
1936 @command(b'debugindexstats', [])
1969 @command(b'debugindexstats', [])
1937 def debugindexstats(ui, repo):
1970 def debugindexstats(ui, repo):
1938 """show stats related to the changelog index"""
1971 """show stats related to the changelog index"""
1939 repo.changelog.shortest(repo.nullid, 1)
1972 repo.changelog.shortest(repo.nullid, 1)
1940 index = repo.changelog.index
1973 index = repo.changelog.index
1941 if not util.safehasattr(index, b'stats'):
1974 if not util.safehasattr(index, b'stats'):
1942 raise error.Abort(_(b'debugindexstats only works with native code'))
1975 raise error.Abort(_(b'debugindexstats only works with native code'))
1943 for k, v in sorted(index.stats().items()):
1976 for k, v in sorted(index.stats().items()):
1944 ui.write(b'%s: %d\n' % (k, v))
1977 ui.write(b'%s: %d\n' % (k, v))
1945
1978
1946
1979
1947 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1980 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1948 def debuginstall(ui, **opts):
1981 def debuginstall(ui, **opts):
1949 """test Mercurial installation
1982 """test Mercurial installation
1950
1983
1951 Returns 0 on success.
1984 Returns 0 on success.
1952 """
1985 """
1953 opts = pycompat.byteskwargs(opts)
1986 opts = pycompat.byteskwargs(opts)
1954
1987
1955 problems = 0
1988 problems = 0
1956
1989
1957 fm = ui.formatter(b'debuginstall', opts)
1990 fm = ui.formatter(b'debuginstall', opts)
1958 fm.startitem()
1991 fm.startitem()
1959
1992
1960 # encoding might be unknown or wrong. don't translate these messages.
1993 # encoding might be unknown or wrong. don't translate these messages.
1961 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1994 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1962 err = None
1995 err = None
1963 try:
1996 try:
1964 codecs.lookup(pycompat.sysstr(encoding.encoding))
1997 codecs.lookup(pycompat.sysstr(encoding.encoding))
1965 except LookupError as inst:
1998 except LookupError as inst:
1966 err = stringutil.forcebytestr(inst)
1999 err = stringutil.forcebytestr(inst)
1967 problems += 1
2000 problems += 1
1968 fm.condwrite(
2001 fm.condwrite(
1969 err,
2002 err,
1970 b'encodingerror',
2003 b'encodingerror',
1971 b" %s\n (check that your locale is properly set)\n",
2004 b" %s\n (check that your locale is properly set)\n",
1972 err,
2005 err,
1973 )
2006 )
1974
2007
1975 # Python
2008 # Python
1976 pythonlib = None
2009 pythonlib = None
1977 if util.safehasattr(os, '__file__'):
2010 if util.safehasattr(os, '__file__'):
1978 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
2011 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1979 elif getattr(sys, 'oxidized', False):
2012 elif getattr(sys, 'oxidized', False):
1980 pythonlib = pycompat.sysexecutable
2013 pythonlib = pycompat.sysexecutable
1981
2014
1982 fm.write(
2015 fm.write(
1983 b'pythonexe',
2016 b'pythonexe',
1984 _(b"checking Python executable (%s)\n"),
2017 _(b"checking Python executable (%s)\n"),
1985 pycompat.sysexecutable or _(b"unknown"),
2018 pycompat.sysexecutable or _(b"unknown"),
1986 )
2019 )
1987 fm.write(
2020 fm.write(
1988 b'pythonimplementation',
2021 b'pythonimplementation',
1989 _(b"checking Python implementation (%s)\n"),
2022 _(b"checking Python implementation (%s)\n"),
1990 pycompat.sysbytes(platform.python_implementation()),
2023 pycompat.sysbytes(platform.python_implementation()),
1991 )
2024 )
1992 fm.write(
2025 fm.write(
1993 b'pythonver',
2026 b'pythonver',
1994 _(b"checking Python version (%s)\n"),
2027 _(b"checking Python version (%s)\n"),
1995 (b"%d.%d.%d" % sys.version_info[:3]),
2028 (b"%d.%d.%d" % sys.version_info[:3]),
1996 )
2029 )
1997 fm.write(
2030 fm.write(
1998 b'pythonlib',
2031 b'pythonlib',
1999 _(b"checking Python lib (%s)...\n"),
2032 _(b"checking Python lib (%s)...\n"),
2000 pythonlib or _(b"unknown"),
2033 pythonlib or _(b"unknown"),
2001 )
2034 )
2002
2035
2003 try:
2036 try:
2004 from . import rustext # pytype: disable=import-error
2037 from . import rustext # pytype: disable=import-error
2005
2038
2006 rustext.__doc__ # trigger lazy import
2039 rustext.__doc__ # trigger lazy import
2007 except ImportError:
2040 except ImportError:
2008 rustext = None
2041 rustext = None
2009
2042
2010 security = set(sslutil.supportedprotocols)
2043 security = set(sslutil.supportedprotocols)
2011 if sslutil.hassni:
2044 if sslutil.hassni:
2012 security.add(b'sni')
2045 security.add(b'sni')
2013
2046
2014 fm.write(
2047 fm.write(
2015 b'pythonsecurity',
2048 b'pythonsecurity',
2016 _(b"checking Python security support (%s)\n"),
2049 _(b"checking Python security support (%s)\n"),
2017 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2050 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2018 )
2051 )
2019
2052
2020 # These are warnings, not errors. So don't increment problem count. This
2053 # These are warnings, not errors. So don't increment problem count. This
2021 # may change in the future.
2054 # may change in the future.
2022 if b'tls1.2' not in security:
2055 if b'tls1.2' not in security:
2023 fm.plain(
2056 fm.plain(
2024 _(
2057 _(
2025 b' TLS 1.2 not supported by Python install; '
2058 b' TLS 1.2 not supported by Python install; '
2026 b'network connections lack modern security\n'
2059 b'network connections lack modern security\n'
2027 )
2060 )
2028 )
2061 )
2029 if b'sni' not in security:
2062 if b'sni' not in security:
2030 fm.plain(
2063 fm.plain(
2031 _(
2064 _(
2032 b' SNI not supported by Python install; may have '
2065 b' SNI not supported by Python install; may have '
2033 b'connectivity issues with some servers\n'
2066 b'connectivity issues with some servers\n'
2034 )
2067 )
2035 )
2068 )
2036
2069
2037 fm.plain(
2070 fm.plain(
2038 _(
2071 _(
2039 b"checking Rust extensions (%s)\n"
2072 b"checking Rust extensions (%s)\n"
2040 % (b'missing' if rustext is None else b'installed')
2073 % (b'missing' if rustext is None else b'installed')
2041 ),
2074 ),
2042 )
2075 )
2043
2076
2044 # TODO print CA cert info
2077 # TODO print CA cert info
2045
2078
2046 # hg version
2079 # hg version
2047 hgver = util.version()
2080 hgver = util.version()
2048 fm.write(
2081 fm.write(
2049 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2082 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2050 )
2083 )
2051 fm.write(
2084 fm.write(
2052 b'hgverextra',
2085 b'hgverextra',
2053 _(b"checking Mercurial custom build (%s)\n"),
2086 _(b"checking Mercurial custom build (%s)\n"),
2054 b'+'.join(hgver.split(b'+')[1:]),
2087 b'+'.join(hgver.split(b'+')[1:]),
2055 )
2088 )
2056
2089
2057 # compiled modules
2090 # compiled modules
2058 hgmodules = None
2091 hgmodules = None
2059 if util.safehasattr(sys.modules[__name__], '__file__'):
2092 if util.safehasattr(sys.modules[__name__], '__file__'):
2060 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2093 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2061 elif getattr(sys, 'oxidized', False):
2094 elif getattr(sys, 'oxidized', False):
2062 hgmodules = pycompat.sysexecutable
2095 hgmodules = pycompat.sysexecutable
2063
2096
2064 fm.write(
2097 fm.write(
2065 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2098 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2066 )
2099 )
2067 fm.write(
2100 fm.write(
2068 b'hgmodules',
2101 b'hgmodules',
2069 _(b"checking installed modules (%s)...\n"),
2102 _(b"checking installed modules (%s)...\n"),
2070 hgmodules or _(b"unknown"),
2103 hgmodules or _(b"unknown"),
2071 )
2104 )
2072
2105
2073 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2106 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2074 rustext = rustandc # for now, that's the only case
2107 rustext = rustandc # for now, that's the only case
2075 cext = policy.policy in (b'c', b'allow') or rustandc
2108 cext = policy.policy in (b'c', b'allow') or rustandc
2076 nopure = cext or rustext
2109 nopure = cext or rustext
2077 if nopure:
2110 if nopure:
2078 err = None
2111 err = None
2079 try:
2112 try:
2080 if cext:
2113 if cext:
2081 from .cext import ( # pytype: disable=import-error
2114 from .cext import ( # pytype: disable=import-error
2082 base85,
2115 base85,
2083 bdiff,
2116 bdiff,
2084 mpatch,
2117 mpatch,
2085 osutil,
2118 osutil,
2086 )
2119 )
2087
2120
2088 # quiet pyflakes
2121 # quiet pyflakes
2089 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2122 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2090 if rustext:
2123 if rustext:
2091 from .rustext import ( # pytype: disable=import-error
2124 from .rustext import ( # pytype: disable=import-error
2092 ancestor,
2125 ancestor,
2093 dirstate,
2126 dirstate,
2094 )
2127 )
2095
2128
2096 dir(ancestor), dir(dirstate) # quiet pyflakes
2129 dir(ancestor), dir(dirstate) # quiet pyflakes
2097 except Exception as inst:
2130 except Exception as inst:
2098 err = stringutil.forcebytestr(inst)
2131 err = stringutil.forcebytestr(inst)
2099 problems += 1
2132 problems += 1
2100 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2133 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2101
2134
2102 compengines = util.compengines._engines.values()
2135 compengines = util.compengines._engines.values()
2103 fm.write(
2136 fm.write(
2104 b'compengines',
2137 b'compengines',
2105 _(b'checking registered compression engines (%s)\n'),
2138 _(b'checking registered compression engines (%s)\n'),
2106 fm.formatlist(
2139 fm.formatlist(
2107 sorted(e.name() for e in compengines),
2140 sorted(e.name() for e in compengines),
2108 name=b'compengine',
2141 name=b'compengine',
2109 fmt=b'%s',
2142 fmt=b'%s',
2110 sep=b', ',
2143 sep=b', ',
2111 ),
2144 ),
2112 )
2145 )
2113 fm.write(
2146 fm.write(
2114 b'compenginesavail',
2147 b'compenginesavail',
2115 _(b'checking available compression engines (%s)\n'),
2148 _(b'checking available compression engines (%s)\n'),
2116 fm.formatlist(
2149 fm.formatlist(
2117 sorted(e.name() for e in compengines if e.available()),
2150 sorted(e.name() for e in compengines if e.available()),
2118 name=b'compengine',
2151 name=b'compengine',
2119 fmt=b'%s',
2152 fmt=b'%s',
2120 sep=b', ',
2153 sep=b', ',
2121 ),
2154 ),
2122 )
2155 )
2123 wirecompengines = compression.compengines.supportedwireengines(
2156 wirecompengines = compression.compengines.supportedwireengines(
2124 compression.SERVERROLE
2157 compression.SERVERROLE
2125 )
2158 )
2126 fm.write(
2159 fm.write(
2127 b'compenginesserver',
2160 b'compenginesserver',
2128 _(
2161 _(
2129 b'checking available compression engines '
2162 b'checking available compression engines '
2130 b'for wire protocol (%s)\n'
2163 b'for wire protocol (%s)\n'
2131 ),
2164 ),
2132 fm.formatlist(
2165 fm.formatlist(
2133 [e.name() for e in wirecompengines if e.wireprotosupport()],
2166 [e.name() for e in wirecompengines if e.wireprotosupport()],
2134 name=b'compengine',
2167 name=b'compengine',
2135 fmt=b'%s',
2168 fmt=b'%s',
2136 sep=b', ',
2169 sep=b', ',
2137 ),
2170 ),
2138 )
2171 )
2139 re2 = b'missing'
2172 re2 = b'missing'
2140 if util._re2:
2173 if util._re2:
2141 re2 = b'available'
2174 re2 = b'available'
2142 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2175 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2143 fm.data(re2=bool(util._re2))
2176 fm.data(re2=bool(util._re2))
2144
2177
2145 # templates
2178 # templates
2146 p = templater.templatedir()
2179 p = templater.templatedir()
2147 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2180 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2148 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2181 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2149 if p:
2182 if p:
2150 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2183 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2151 if m:
2184 if m:
2152 # template found, check if it is working
2185 # template found, check if it is working
2153 err = None
2186 err = None
2154 try:
2187 try:
2155 templater.templater.frommapfile(m)
2188 templater.templater.frommapfile(m)
2156 except Exception as inst:
2189 except Exception as inst:
2157 err = stringutil.forcebytestr(inst)
2190 err = stringutil.forcebytestr(inst)
2158 p = None
2191 p = None
2159 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2192 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2160 else:
2193 else:
2161 p = None
2194 p = None
2162 fm.condwrite(
2195 fm.condwrite(
2163 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2196 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2164 )
2197 )
2165 fm.condwrite(
2198 fm.condwrite(
2166 not m,
2199 not m,
2167 b'defaulttemplatenotfound',
2200 b'defaulttemplatenotfound',
2168 _(b" template '%s' not found\n"),
2201 _(b" template '%s' not found\n"),
2169 b"default",
2202 b"default",
2170 )
2203 )
2171 if not p:
2204 if not p:
2172 problems += 1
2205 problems += 1
2173 fm.condwrite(
2206 fm.condwrite(
2174 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2207 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2175 )
2208 )
2176
2209
2177 # editor
2210 # editor
2178 editor = ui.geteditor()
2211 editor = ui.geteditor()
2179 editor = util.expandpath(editor)
2212 editor = util.expandpath(editor)
2180 editorbin = procutil.shellsplit(editor)[0]
2213 editorbin = procutil.shellsplit(editor)[0]
2181 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2214 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2182 cmdpath = procutil.findexe(editorbin)
2215 cmdpath = procutil.findexe(editorbin)
2183 fm.condwrite(
2216 fm.condwrite(
2184 not cmdpath and editor == b'vi',
2217 not cmdpath and editor == b'vi',
2185 b'vinotfound',
2218 b'vinotfound',
2186 _(
2219 _(
2187 b" No commit editor set and can't find %s in PATH\n"
2220 b" No commit editor set and can't find %s in PATH\n"
2188 b" (specify a commit editor in your configuration"
2221 b" (specify a commit editor in your configuration"
2189 b" file)\n"
2222 b" file)\n"
2190 ),
2223 ),
2191 not cmdpath and editor == b'vi' and editorbin,
2224 not cmdpath and editor == b'vi' and editorbin,
2192 )
2225 )
2193 fm.condwrite(
2226 fm.condwrite(
2194 not cmdpath and editor != b'vi',
2227 not cmdpath and editor != b'vi',
2195 b'editornotfound',
2228 b'editornotfound',
2196 _(
2229 _(
2197 b" Can't find editor '%s' in PATH\n"
2230 b" Can't find editor '%s' in PATH\n"
2198 b" (specify a commit editor in your configuration"
2231 b" (specify a commit editor in your configuration"
2199 b" file)\n"
2232 b" file)\n"
2200 ),
2233 ),
2201 not cmdpath and editorbin,
2234 not cmdpath and editorbin,
2202 )
2235 )
2203 if not cmdpath and editor != b'vi':
2236 if not cmdpath and editor != b'vi':
2204 problems += 1
2237 problems += 1
2205
2238
2206 # check username
2239 # check username
2207 username = None
2240 username = None
2208 err = None
2241 err = None
2209 try:
2242 try:
2210 username = ui.username()
2243 username = ui.username()
2211 except error.Abort as e:
2244 except error.Abort as e:
2212 err = e.message
2245 err = e.message
2213 problems += 1
2246 problems += 1
2214
2247
2215 fm.condwrite(
2248 fm.condwrite(
2216 username, b'username', _(b"checking username (%s)\n"), username
2249 username, b'username', _(b"checking username (%s)\n"), username
2217 )
2250 )
2218 fm.condwrite(
2251 fm.condwrite(
2219 err,
2252 err,
2220 b'usernameerror',
2253 b'usernameerror',
2221 _(
2254 _(
2222 b"checking username...\n %s\n"
2255 b"checking username...\n %s\n"
2223 b" (specify a username in your configuration file)\n"
2256 b" (specify a username in your configuration file)\n"
2224 ),
2257 ),
2225 err,
2258 err,
2226 )
2259 )
2227
2260
2228 for name, mod in extensions.extensions():
2261 for name, mod in extensions.extensions():
2229 handler = getattr(mod, 'debuginstall', None)
2262 handler = getattr(mod, 'debuginstall', None)
2230 if handler is not None:
2263 if handler is not None:
2231 problems += handler(ui, fm)
2264 problems += handler(ui, fm)
2232
2265
2233 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2266 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2234 if not problems:
2267 if not problems:
2235 fm.data(problems=problems)
2268 fm.data(problems=problems)
2236 fm.condwrite(
2269 fm.condwrite(
2237 problems,
2270 problems,
2238 b'problems',
2271 b'problems',
2239 _(b"%d problems detected, please check your install!\n"),
2272 _(b"%d problems detected, please check your install!\n"),
2240 problems,
2273 problems,
2241 )
2274 )
2242 fm.end()
2275 fm.end()
2243
2276
2244 return problems
2277 return problems
2245
2278
2246
2279
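# Illustrative sketch: the probe-and-count pattern that debuginstall follows
# above (run each check, print its result, count failures), reduced to plain
# callables so the control flow is visible without the formatter machinery.
# The probe and helper names are hypothetical, not Mercurial APIs.
def _sketch_run_install_checks(checks, write):
    """Run (name, probe) pairs; a probe returns None on success or an error."""
    problems = 0
    for name, probe in checks:
        write("checking %s..." % name)
        err = probe()
        if err is None:
            write(" ok\n")
        else:
            write(" %s\n" % err)
            problems += 1
    if problems:
        write("%d problems detected\n" % problems)
    else:
        write("no problems detected\n")
    return problems


def _sketch_encoding_probe(name="utf-8"):
    """Probe modeled on the encoding check above: is the codec available?"""
    import codecs

    try:
        codecs.lookup(name)
    except LookupError as inst:
        return str(inst)
    return None


# Example:
#   import sys
#   _sketch_run_install_checks(
#       [("encoding (utf-8)", _sketch_encoding_probe)], sys.stdout.write
#   )
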
2247 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2280 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2248 def debugknown(ui, repopath, *ids, **opts):
2281 def debugknown(ui, repopath, *ids, **opts):
2249 """test whether node ids are known to a repo
2282 """test whether node ids are known to a repo
2250
2283
2251 Every ID must be a full-length hex node id string. Returns a list of 0s
2284 Every ID must be a full-length hex node id string. Returns a list of 0s
2252 and 1s indicating unknown/known.
2285 and 1s indicating unknown/known.
2253 """
2286 """
2254 opts = pycompat.byteskwargs(opts)
2287 opts = pycompat.byteskwargs(opts)
2255 repo = hg.peer(ui, opts, repopath)
2288 repo = hg.peer(ui, opts, repopath)
2256 if not repo.capable(b'known'):
2289 if not repo.capable(b'known'):
2257 raise error.Abort(b"known() not supported by target repository")
2290 raise error.Abort(b"known() not supported by target repository")
2258 flags = repo.known([bin(s) for s in ids])
2291 flags = repo.known([bin(s) for s in ids])
2259 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2292 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2260
2293
2261
2294
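# Illustrative sketch: how the booleans returned by the peer's known() call
# become the single line of 0s and 1s that debugknown prints above. The
# known-node set here is a plain Python set standing in for the remote repo.
def _sketch_format_known(known_nodes, queried_ids):
    """Return one '0'/'1' character per queried id, in query order."""
    return "".join(
        "1" if node in known_nodes else "0" for node in queried_ids
    )


# Example: _sketch_format_known({"ab12", "cd34"}, ["cd34", "ffff", "ab12"])
# returns "101".
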
2262 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2295 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2263 def debuglabelcomplete(ui, repo, *args):
2296 def debuglabelcomplete(ui, repo, *args):
2264 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2297 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2265 debugnamecomplete(ui, repo, *args)
2298 debugnamecomplete(ui, repo, *args)
2266
2299
2267
2300
2268 @command(
2301 @command(
2269 b'debuglocks',
2302 b'debuglocks',
2270 [
2303 [
2271 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2304 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2272 (
2305 (
2273 b'W',
2306 b'W',
2274 b'force-free-wlock',
2307 b'force-free-wlock',
2275 None,
2308 None,
2276 _(b'free the working state lock (DANGEROUS)'),
2309 _(b'free the working state lock (DANGEROUS)'),
2277 ),
2310 ),
2278 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2311 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2279 (
2312 (
2280 b'S',
2313 b'S',
2281 b'set-wlock',
2314 b'set-wlock',
2282 None,
2315 None,
2283 _(b'set the working state lock until stopped'),
2316 _(b'set the working state lock until stopped'),
2284 ),
2317 ),
2285 ],
2318 ],
2286 _(b'[OPTION]...'),
2319 _(b'[OPTION]...'),
2287 )
2320 )
2288 def debuglocks(ui, repo, **opts):
2321 def debuglocks(ui, repo, **opts):
2289 """show or modify state of locks
2322 """show or modify state of locks
2290
2323
2291 By default, this command will show which locks are held. This
2324 By default, this command will show which locks are held. This
2292 includes the user and process holding the lock, the amount of time
2325 includes the user and process holding the lock, the amount of time
2293 the lock has been held, and the machine name where the process is
2326 the lock has been held, and the machine name where the process is
2294 running if it's not local.
2327 running if it's not local.
2295
2328
2296 Locks protect the integrity of Mercurial's data, so should be
2329 Locks protect the integrity of Mercurial's data, so should be
2297 treated with care. System crashes or other interruptions may cause
2330 treated with care. System crashes or other interruptions may cause
2298 locks to not be properly released, though Mercurial will usually
2331 locks to not be properly released, though Mercurial will usually
2299 detect and remove such stale locks automatically.
2332 detect and remove such stale locks automatically.
2300
2333
2301 However, detecting stale locks may not always be possible (for
2334 However, detecting stale locks may not always be possible (for
2302 instance, on a shared filesystem). Removing locks may also be
2335 instance, on a shared filesystem). Removing locks may also be
2303 blocked by filesystem permissions.
2336 blocked by filesystem permissions.
2304
2337
2305 Setting a lock will prevent other commands from changing the data.
2338 Setting a lock will prevent other commands from changing the data.
2306 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2339 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2307 The set locks are removed when the command exits.
2340 The set locks are removed when the command exits.
2308
2341
2309 Returns 0 if no locks are held.
2342 Returns 0 if no locks are held.
2310
2343
2311 """
2344 """
2312
2345
2313 if opts.get('force_free_lock'):
2346 if opts.get('force_free_lock'):
2314 repo.svfs.tryunlink(b'lock')
2347 repo.svfs.tryunlink(b'lock')
2315 if opts.get('force_free_wlock'):
2348 if opts.get('force_free_wlock'):
2316 repo.vfs.tryunlink(b'wlock')
2349 repo.vfs.tryunlink(b'wlock')
2317 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2350 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2318 return 0
2351 return 0
2319
2352
2320 locks = []
2353 locks = []
2321 try:
2354 try:
2322 if opts.get('set_wlock'):
2355 if opts.get('set_wlock'):
2323 try:
2356 try:
2324 locks.append(repo.wlock(False))
2357 locks.append(repo.wlock(False))
2325 except error.LockHeld:
2358 except error.LockHeld:
2326 raise error.Abort(_(b'wlock is already held'))
2359 raise error.Abort(_(b'wlock is already held'))
2327 if opts.get('set_lock'):
2360 if opts.get('set_lock'):
2328 try:
2361 try:
2329 locks.append(repo.lock(False))
2362 locks.append(repo.lock(False))
2330 except error.LockHeld:
2363 except error.LockHeld:
2331 raise error.Abort(_(b'lock is already held'))
2364 raise error.Abort(_(b'lock is already held'))
2332 if len(locks):
2365 if len(locks):
2333 try:
2366 try:
2334 if ui.interactive():
2367 if ui.interactive():
2335 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2368 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2336 ui.promptchoice(prompt)
2369 ui.promptchoice(prompt)
2337 else:
2370 else:
2338 msg = b"%d locks held, waiting for signal\n"
2371 msg = b"%d locks held, waiting for signal\n"
2339 msg %= len(locks)
2372 msg %= len(locks)
2340 ui.status(msg)
2373 ui.status(msg)
2341 while True: # XXX wait for a signal
2374 while True: # XXX wait for a signal
2342 time.sleep(0.1)
2375 time.sleep(0.1)
2343 except KeyboardInterrupt:
2376 except KeyboardInterrupt:
2344 msg = b"signal-received releasing locks\n"
2377 msg = b"signal-received releasing locks\n"
2345 ui.status(msg)
2378 ui.status(msg)
2346 return 0
2379 return 0
2347 finally:
2380 finally:
2348 release(*locks)
2381 release(*locks)
2349
2382
2350 now = time.time()
2383 now = time.time()
2351 held = 0
2384 held = 0
2352
2385
2353 def report(vfs, name, method):
2386 def report(vfs, name, method):
2354 # this causes stale locks to get reaped for more accurate reporting
2387 # this causes stale locks to get reaped for more accurate reporting
2355 try:
2388 try:
2356 l = method(False)
2389 l = method(False)
2357 except error.LockHeld:
2390 except error.LockHeld:
2358 l = None
2391 l = None
2359
2392
2360 if l:
2393 if l:
2361 l.release()
2394 l.release()
2362 else:
2395 else:
2363 try:
2396 try:
2364 st = vfs.lstat(name)
2397 st = vfs.lstat(name)
2365 age = now - st[stat.ST_MTIME]
2398 age = now - st[stat.ST_MTIME]
2366 user = util.username(st.st_uid)
2399 user = util.username(st.st_uid)
2367 locker = vfs.readlock(name)
2400 locker = vfs.readlock(name)
2368 if b":" in locker:
2401 if b":" in locker:
2369 host, pid = locker.split(b':')
2402 host, pid = locker.split(b':')
2370 if host == socket.gethostname():
2403 if host == socket.gethostname():
2371 locker = b'user %s, process %s' % (user or b'None', pid)
2404 locker = b'user %s, process %s' % (user or b'None', pid)
2372 else:
2405 else:
2373 locker = b'user %s, process %s, host %s' % (
2406 locker = b'user %s, process %s, host %s' % (
2374 user or b'None',
2407 user or b'None',
2375 pid,
2408 pid,
2376 host,
2409 host,
2377 )
2410 )
2378 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2411 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2379 return 1
2412 return 1
2380 except FileNotFoundError:
2413 except FileNotFoundError:
2381 pass
2414 pass
2382
2415
2383 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2416 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2384 return 0
2417 return 0
2385
2418
2386 held += report(repo.svfs, b"lock", repo.lock)
2419 held += report(repo.svfs, b"lock", repo.lock)
2387 held += report(repo.vfs, b"wlock", repo.wlock)
2420 held += report(repo.vfs, b"wlock", repo.wlock)
2388
2421
2389 return held
2422 return held
2390
2423
2391
2424
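# Illustrative, stdlib-only sketch of the reporting side of debuglocks above:
# read a "host:pid" locker string from a lock file and compute the lock's age
# from the file's mtime. This assumes a plain file holding the locker string;
# it is not a claim about how every Mercurial vfs backend stores its locks.
def _sketch_describe_lock(lockpath):
    """Return a one-line description of a lock file, or None if it is free."""
    import os
    import socket
    import time

    try:
        st = os.lstat(lockpath)
        with open(lockpath, "rb") as fp:
            locker = fp.read().decode("utf-8", "replace").strip()
    except FileNotFoundError:
        return None
    age = int(time.time() - st.st_mtime)
    if ":" in locker:
        host, pid = locker.split(":", 1)
        if host == socket.gethostname():
            locker = "process %s" % pid
        else:
            locker = "process %s, host %s" % (pid, host)
    return "%s (%ds)" % (locker, age)
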
2392 @command(
2425 @command(
2393 b'debugmanifestfulltextcache',
2426 b'debugmanifestfulltextcache',
2394 [
2427 [
2395 (b'', b'clear', False, _(b'clear the cache')),
2428 (b'', b'clear', False, _(b'clear the cache')),
2396 (
2429 (
2397 b'a',
2430 b'a',
2398 b'add',
2431 b'add',
2399 [],
2432 [],
2400 _(b'add the given manifest nodes to the cache'),
2433 _(b'add the given manifest nodes to the cache'),
2401 _(b'NODE'),
2434 _(b'NODE'),
2402 ),
2435 ),
2403 ],
2436 ],
2404 b'',
2437 b'',
2405 )
2438 )
2406 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2439 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2407 """show, clear or amend the contents of the manifest fulltext cache"""
2440 """show, clear or amend the contents of the manifest fulltext cache"""
2408
2441
2409 def getcache():
2442 def getcache():
2410 r = repo.manifestlog.getstorage(b'')
2443 r = repo.manifestlog.getstorage(b'')
2411 try:
2444 try:
2412 return r._fulltextcache
2445 return r._fulltextcache
2413 except AttributeError:
2446 except AttributeError:
2414 msg = _(
2447 msg = _(
2415 b"Current revlog implementation doesn't appear to have a "
2448 b"Current revlog implementation doesn't appear to have a "
2416 b"manifest fulltext cache\n"
2449 b"manifest fulltext cache\n"
2417 )
2450 )
2418 raise error.Abort(msg)
2451 raise error.Abort(msg)
2419
2452
2420 if opts.get('clear'):
2453 if opts.get('clear'):
2421 with repo.wlock():
2454 with repo.wlock():
2422 cache = getcache()
2455 cache = getcache()
2423 cache.clear(clear_persisted_data=True)
2456 cache.clear(clear_persisted_data=True)
2424 return
2457 return
2425
2458
2426 if add:
2459 if add:
2427 with repo.wlock():
2460 with repo.wlock():
2428 m = repo.manifestlog
2461 m = repo.manifestlog
2429 store = m.getstorage(b'')
2462 store = m.getstorage(b'')
2430 for n in add:
2463 for n in add:
2431 try:
2464 try:
2432 manifest = m[store.lookup(n)]
2465 manifest = m[store.lookup(n)]
2433 except error.LookupError as e:
2466 except error.LookupError as e:
2434 raise error.Abort(
2467 raise error.Abort(
2435 bytes(e), hint=b"Check your manifest node id"
2468 bytes(e), hint=b"Check your manifest node id"
2436 )
2469 )
2437 manifest.read() # stores revision in cache too
2470 manifest.read() # stores revision in cache too
2438 return
2471 return
2439
2472
2440 cache = getcache()
2473 cache = getcache()
2441 if not len(cache):
2474 if not len(cache):
2442 ui.write(_(b'cache empty\n'))
2475 ui.write(_(b'cache empty\n'))
2443 else:
2476 else:
2444 ui.write(
2477 ui.write(
2445 _(
2478 _(
2446 b'cache contains %d manifest entries, in order of most to '
2479 b'cache contains %d manifest entries, in order of most to '
2447 b'least recent:\n'
2480 b'least recent:\n'
2448 )
2481 )
2449 % (len(cache),)
2482 % (len(cache),)
2450 )
2483 )
2451 totalsize = 0
2484 totalsize = 0
2452 for nodeid in cache:
2485 for nodeid in cache:
2453 # Use cache.peek to not update the LRU order
2486 # Use cache.peek to not update the LRU order
2454 data = cache.peek(nodeid)
2487 data = cache.peek(nodeid)
2455 size = len(data)
2488 size = len(data)
2456 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2489 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2457 ui.write(
2490 ui.write(
2458 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2491 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2459 )
2492 )
2460 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2493 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2461 ui.write(
2494 ui.write(
2462 _(b'total cache data size %s, on-disk %s\n')
2495 _(b'total cache data size %s, on-disk %s\n')
2463 % (util.bytecount(totalsize), util.bytecount(ondisk))
2496 % (util.bytecount(totalsize), util.bytecount(ondisk))
2464 )
2497 )
2465
2498
2466
2499
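# Illustrative sketch of the size accounting used when listing the manifest
# fulltext cache above: each entry costs its data plus a fixed 24 bytes of
# overhead (20-byte node id plus a 4-byte size field). ``cache`` is any
# mapping of 20-byte node ids to fulltext bytes; entries come out in whatever
# order the mapping yields, whereas the real cache reports most recent first.
def _sketch_cache_report(cache):
    """Return ([(hex_nodeid, size), ...], total_bytes) for a fulltext cache."""
    per_entry_overhead = 24  # 20 bytes node id + 4 bytes size field
    total = 0
    entries = []
    for nodeid, data in cache.items():
        size = len(data)
        total += size + per_entry_overhead
        entries.append((nodeid.hex(), size))
    return entries, total
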
2467 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2500 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2468 def debugmergestate(ui, repo, *args, **opts):
2501 def debugmergestate(ui, repo, *args, **opts):
2469 """print merge state
2502 """print merge state
2470
2503
2471 Use --verbose to print out information about whether v1 or v2 merge state
2504 Use --verbose to print out information about whether v1 or v2 merge state
2472 was chosen."""
2505 was chosen."""
2473
2506
2474 if ui.verbose:
2507 if ui.verbose:
2475 ms = mergestatemod.mergestate(repo)
2508 ms = mergestatemod.mergestate(repo)
2476
2509
2477 # sort so that reasonable information is on top
2510 # sort so that reasonable information is on top
2478 v1records = ms._readrecordsv1()
2511 v1records = ms._readrecordsv1()
2479 v2records = ms._readrecordsv2()
2512 v2records = ms._readrecordsv2()
2480
2513
2481 if not v1records and not v2records:
2514 if not v1records and not v2records:
2482 pass
2515 pass
2483 elif not v2records:
2516 elif not v2records:
2484 ui.writenoi18n(b'no version 2 merge state\n')
2517 ui.writenoi18n(b'no version 2 merge state\n')
2485 elif ms._v1v2match(v1records, v2records):
2518 elif ms._v1v2match(v1records, v2records):
2486 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2519 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2487 else:
2520 else:
2488 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2521 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2489
2522
2490 opts = pycompat.byteskwargs(opts)
2523 opts = pycompat.byteskwargs(opts)
2491 if not opts[b'template']:
2524 if not opts[b'template']:
2492 opts[b'template'] = (
2525 opts[b'template'] = (
2493 b'{if(commits, "", "no merge state found\n")}'
2526 b'{if(commits, "", "no merge state found\n")}'
2494 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2527 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2495 b'{files % "file: {path} (state \\"{state}\\")\n'
2528 b'{files % "file: {path} (state \\"{state}\\")\n'
2496 b'{if(local_path, "'
2529 b'{if(local_path, "'
2497 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2530 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2498 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2531 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2499 b' other path: {other_path} (node {other_node})\n'
2532 b' other path: {other_path} (node {other_node})\n'
2500 b'")}'
2533 b'")}'
2501 b'{if(rename_side, "'
2534 b'{if(rename_side, "'
2502 b' rename side: {rename_side}\n'
2535 b' rename side: {rename_side}\n'
2503 b' renamed path: {renamed_path}\n'
2536 b' renamed path: {renamed_path}\n'
2504 b'")}'
2537 b'")}'
2505 b'{extras % " extra: {key} = {value}\n"}'
2538 b'{extras % " extra: {key} = {value}\n"}'
2506 b'"}'
2539 b'"}'
2507 b'{extras % "extra: {file} ({key} = {value})\n"}'
2540 b'{extras % "extra: {file} ({key} = {value})\n"}'
2508 )
2541 )
2509
2542
2510 ms = mergestatemod.mergestate.read(repo)
2543 ms = mergestatemod.mergestate.read(repo)
2511
2544
2512 fm = ui.formatter(b'debugmergestate', opts)
2545 fm = ui.formatter(b'debugmergestate', opts)
2513 fm.startitem()
2546 fm.startitem()
2514
2547
2515 fm_commits = fm.nested(b'commits')
2548 fm_commits = fm.nested(b'commits')
2516 if ms.active():
2549 if ms.active():
2517 for name, node, label_index in (
2550 for name, node, label_index in (
2518 (b'local', ms.local, 0),
2551 (b'local', ms.local, 0),
2519 (b'other', ms.other, 1),
2552 (b'other', ms.other, 1),
2520 ):
2553 ):
2521 fm_commits.startitem()
2554 fm_commits.startitem()
2522 fm_commits.data(name=name)
2555 fm_commits.data(name=name)
2523 fm_commits.data(node=hex(node))
2556 fm_commits.data(node=hex(node))
2524 if ms._labels and len(ms._labels) > label_index:
2557 if ms._labels and len(ms._labels) > label_index:
2525 fm_commits.data(label=ms._labels[label_index])
2558 fm_commits.data(label=ms._labels[label_index])
2526 fm_commits.end()
2559 fm_commits.end()
2527
2560
2528 fm_files = fm.nested(b'files')
2561 fm_files = fm.nested(b'files')
2529 if ms.active():
2562 if ms.active():
2530 for f in ms:
2563 for f in ms:
2531 fm_files.startitem()
2564 fm_files.startitem()
2532 fm_files.data(path=f)
2565 fm_files.data(path=f)
2533 state = ms._state[f]
2566 state = ms._state[f]
2534 fm_files.data(state=state[0])
2567 fm_files.data(state=state[0])
2535 if state[0] in (
2568 if state[0] in (
2536 mergestatemod.MERGE_RECORD_UNRESOLVED,
2569 mergestatemod.MERGE_RECORD_UNRESOLVED,
2537 mergestatemod.MERGE_RECORD_RESOLVED,
2570 mergestatemod.MERGE_RECORD_RESOLVED,
2538 ):
2571 ):
2539 fm_files.data(local_key=state[1])
2572 fm_files.data(local_key=state[1])
2540 fm_files.data(local_path=state[2])
2573 fm_files.data(local_path=state[2])
2541 fm_files.data(ancestor_path=state[3])
2574 fm_files.data(ancestor_path=state[3])
2542 fm_files.data(ancestor_node=state[4])
2575 fm_files.data(ancestor_node=state[4])
2543 fm_files.data(other_path=state[5])
2576 fm_files.data(other_path=state[5])
2544 fm_files.data(other_node=state[6])
2577 fm_files.data(other_node=state[6])
2545 fm_files.data(local_flags=state[7])
2578 fm_files.data(local_flags=state[7])
2546 elif state[0] in (
2579 elif state[0] in (
2547 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2580 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2548 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2581 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2549 ):
2582 ):
2550 fm_files.data(renamed_path=state[1])
2583 fm_files.data(renamed_path=state[1])
2551 fm_files.data(rename_side=state[2])
2584 fm_files.data(rename_side=state[2])
2552 fm_extras = fm_files.nested(b'extras')
2585 fm_extras = fm_files.nested(b'extras')
2553 for k, v in sorted(ms.extras(f).items()):
2586 for k, v in sorted(ms.extras(f).items()):
2554 fm_extras.startitem()
2587 fm_extras.startitem()
2555 fm_extras.data(key=k)
2588 fm_extras.data(key=k)
2556 fm_extras.data(value=v)
2589 fm_extras.data(value=v)
2557 fm_extras.end()
2590 fm_extras.end()
2558
2591
2559 fm_files.end()
2592 fm_files.end()
2560
2593
2561 fm_extras = fm.nested(b'extras')
2594 fm_extras = fm.nested(b'extras')
2562 for f, d in sorted(ms.allextras().items()):
2595 for f, d in sorted(ms.allextras().items()):
2563 if f in ms:
2596 if f in ms:
2564 # If file is in mergestate, we have already processed its extras
2597 # If file is in mergestate, we have already processed its extras
2565 continue
2598 continue
2566 for k, v in d.items():
2599 for k, v in d.items():
2567 fm_extras.startitem()
2600 fm_extras.startitem()
2568 fm_extras.data(file=f)
2601 fm_extras.data(file=f)
2569 fm_extras.data(key=k)
2602 fm_extras.data(key=k)
2570 fm_extras.data(value=v)
2603 fm_extras.data(value=v)
2571 fm_extras.end()
2604 fm_extras.end()
2572
2605
2573 fm.end()
2606 fm.end()
2574
2607
2575
2608
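# Illustrative sketch: roughly what the default debugmergestate template above
# renders for one file record, written as a plain function over a dict. The
# record keys mirror the template fields (path, state, local_path, ...); the
# helper itself is hypothetical and not part of the merge state API.
def _sketch_format_merge_file(record, write):
    """Print one file record in the spirit of the default template above."""
    write('file: %s (state "%s")\n' % (record["path"], record["state"]))
    if "local_path" in record:
        write(" local path: %s\n" % record["local_path"])
        write(" ancestor path: %s\n" % record["ancestor_path"])
        write(" other path: %s\n" % record["other_path"])
    for key, value in sorted(record.get("extras", {}).items()):
        write(" extra: %s = %s\n" % (key, value))
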
2576 @command(b'debugnamecomplete', [], _(b'NAME...'))
2609 @command(b'debugnamecomplete', [], _(b'NAME...'))
2577 def debugnamecomplete(ui, repo, *args):
2610 def debugnamecomplete(ui, repo, *args):
2578 '''complete "names" - tags, open branch names, bookmark names'''
2611 '''complete "names" - tags, open branch names, bookmark names'''
2579
2612
2580 names = set()
2613 names = set()
2581 # since we previously only listed open branches, we will handle that
2614 # since we previously only listed open branches, we will handle that
2582 # specially (after this for loop)
2615 # specially (after this for loop)
2583 for name, ns in repo.names.items():
2616 for name, ns in repo.names.items():
2584 if name != b'branches':
2617 if name != b'branches':
2585 names.update(ns.listnames(repo))
2618 names.update(ns.listnames(repo))
2586 names.update(
2619 names.update(
2587 tag
2620 tag
2588 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2621 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2589 if not closed
2622 if not closed
2590 )
2623 )
2591 completions = set()
2624 completions = set()
2592 if not args:
2625 if not args:
2593 args = [b'']
2626 args = [b'']
2594 for a in args:
2627 for a in args:
2595 completions.update(n for n in names if n.startswith(a))
2628 completions.update(n for n in names if n.startswith(a))
2596 ui.write(b'\n'.join(sorted(completions)))
2629 ui.write(b'\n'.join(sorted(completions)))
2597 ui.write(b'\n')
2630 ui.write(b'\n')
2598
2631
2599
2632
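# Illustrative sketch: the prefix completion debugnamecomplete performs above,
# over a plain iterable of candidate names (tags, bookmarks, open branches).
# An empty argument list matches everything, as in the command itself.
def _sketch_complete_names(names, prefixes):
    """Return the sorted completions for the given prefixes."""
    prefixes = list(prefixes) or [""]
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    return sorted(completions)


# Example: _sketch_complete_names(["tip", "stable", "default"], ["s"])
# returns ["stable"].
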
2600 @command(
2633 @command(
2601 b'debugnodemap',
2634 b'debugnodemap',
2602 [
2635 [
2603 (
2636 (
2604 b'',
2637 b'',
2605 b'dump-new',
2638 b'dump-new',
2606 False,
2639 False,
2607 _(b'write a (new) persistent binary nodemap on stdout'),
2640 _(b'write a (new) persistent binary nodemap on stdout'),
2608 ),
2641 ),
2609 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2642 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2610 (
2643 (
2611 b'',
2644 b'',
2612 b'check',
2645 b'check',
2613 False,
2646 False,
2614 _(b'check that the data on disk are correct.'),
2647 _(b'check that the data on disk are correct.'),
2615 ),
2648 ),
2616 (
2649 (
2617 b'',
2650 b'',
2618 b'metadata',
2651 b'metadata',
2619 False,
2652 False,
2620 _(b'display the on disk meta data for the nodemap'),
2653 _(b'display the on disk meta data for the nodemap'),
2621 ),
2654 ),
2622 ],
2655 ],
2623 )
2656 )
2624 def debugnodemap(ui, repo, **opts):
2657 def debugnodemap(ui, repo, **opts):
2625 """write and inspect on disk nodemap"""
2658 """write and inspect on disk nodemap"""
2626 if opts['dump_new']:
2659 if opts['dump_new']:
2627 unfi = repo.unfiltered()
2660 unfi = repo.unfiltered()
2628 cl = unfi.changelog
2661 cl = unfi.changelog
2629 if util.safehasattr(cl.index, "nodemap_data_all"):
2662 if util.safehasattr(cl.index, "nodemap_data_all"):
2630 data = cl.index.nodemap_data_all()
2663 data = cl.index.nodemap_data_all()
2631 else:
2664 else:
2632 data = nodemap.persistent_data(cl.index)
2665 data = nodemap.persistent_data(cl.index)
2633 ui.write(data)
2666 ui.write(data)
2634 elif opts['dump_disk']:
2667 elif opts['dump_disk']:
2635 unfi = repo.unfiltered()
2668 unfi = repo.unfiltered()
2636 cl = unfi.changelog
2669 cl = unfi.changelog
2637 nm_data = nodemap.persisted_data(cl)
2670 nm_data = nodemap.persisted_data(cl)
2638 if nm_data is not None:
2671 if nm_data is not None:
2639 docket, data = nm_data
2672 docket, data = nm_data
2640 ui.write(data[:])
2673 ui.write(data[:])
2641 elif opts['check']:
2674 elif opts['check']:
2642 unfi = repo.unfiltered()
2675 unfi = repo.unfiltered()
2643 cl = unfi.changelog
2676 cl = unfi.changelog
2644 nm_data = nodemap.persisted_data(cl)
2677 nm_data = nodemap.persisted_data(cl)
2645 if nm_data is not None:
2678 if nm_data is not None:
2646 docket, data = nm_data
2679 docket, data = nm_data
2647 return nodemap.check_data(ui, cl.index, data)
2680 return nodemap.check_data(ui, cl.index, data)
2648 elif opts['metadata']:
2681 elif opts['metadata']:
2649 unfi = repo.unfiltered()
2682 unfi = repo.unfiltered()
2650 cl = unfi.changelog
2683 cl = unfi.changelog
2651 nm_data = nodemap.persisted_data(cl)
2684 nm_data = nodemap.persisted_data(cl)
2652 if nm_data is not None:
2685 if nm_data is not None:
2653 docket, data = nm_data
2686 docket, data = nm_data
2654 ui.write((b"uid: %s\n") % docket.uid)
2687 ui.write((b"uid: %s\n") % docket.uid)
2655 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2688 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2656 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2689 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2657 ui.write((b"data-length: %d\n") % docket.data_length)
2690 ui.write((b"data-length: %d\n") % docket.data_length)
2658 ui.write((b"data-unused: %d\n") % docket.data_unused)
2691 ui.write((b"data-unused: %d\n") % docket.data_unused)
2659 unused_perc = docket.data_unused * 100.0 / docket.data_length
2692 unused_perc = docket.data_unused * 100.0 / docket.data_length
2660 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2693 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2661
2694
2662
2695
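# Illustrative sketch of the --metadata arithmetic above: the wasted-space
# percentage is data_unused * 100.0 / data_length. ``docket`` is any object
# with uid, tip_rev, data_length and data_unused attributes; the zero-length
# guard is added here only so the sketch never divides by zero.
def _sketch_nodemap_summary(docket):
    """Return the docket fields shown by --metadata as a single string."""
    unused_pct = 0.0
    if docket.data_length:
        unused_pct = docket.data_unused * 100.0 / docket.data_length
    return "uid: %s\ntip-rev: %d\ndata-length: %d\ndata-unused: %d (%2.3f%%)" % (
        docket.uid,
        docket.tip_rev,
        docket.data_length,
        docket.data_unused,
        unused_pct,
    )
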
2663 @command(
2696 @command(
2664 b'debugobsolete',
2697 b'debugobsolete',
2665 [
2698 [
2666 (b'', b'flags', 0, _(b'markers flag')),
2699 (b'', b'flags', 0, _(b'markers flag')),
2667 (
2700 (
2668 b'',
2701 b'',
2669 b'record-parents',
2702 b'record-parents',
2670 False,
2703 False,
2671 _(b'record parent information for the precursor'),
2704 _(b'record parent information for the precursor'),
2672 ),
2705 ),
2673 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2706 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2674 (
2707 (
2675 b'',
2708 b'',
2676 b'exclusive',
2709 b'exclusive',
2677 False,
2710 False,
2678 _(b'restrict display to markers only relevant to REV'),
2711 _(b'restrict display to markers only relevant to REV'),
2679 ),
2712 ),
2680 (b'', b'index', False, _(b'display index of the marker')),
2713 (b'', b'index', False, _(b'display index of the marker')),
2681 (b'', b'delete', [], _(b'delete markers specified by indices')),
2714 (b'', b'delete', [], _(b'delete markers specified by indices')),
2682 ]
2715 ]
2683 + cmdutil.commitopts2
2716 + cmdutil.commitopts2
2684 + cmdutil.formatteropts,
2717 + cmdutil.formatteropts,
2685 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2718 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2686 )
2719 )
2687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2720 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2688 """create arbitrary obsolete marker
2721 """create arbitrary obsolete marker
2689
2722
2690 With no arguments, displays the list of obsolescence markers."""
2723 With no arguments, displays the list of obsolescence markers."""
2691
2724
2692 opts = pycompat.byteskwargs(opts)
2725 opts = pycompat.byteskwargs(opts)
2693
2726
2694 def parsenodeid(s):
2727 def parsenodeid(s):
2695 try:
2728 try:
2696 # We do not use revsingle/revrange functions here to accept
2729 # We do not use revsingle/revrange functions here to accept
2697 # arbitrary node identifiers, possibly not present in the
2730 # arbitrary node identifiers, possibly not present in the
2698 # local repository.
2731 # local repository.
2699 n = bin(s)
2732 n = bin(s)
2700 if len(n) != repo.nodeconstants.nodelen:
2733 if len(n) != repo.nodeconstants.nodelen:
2701 raise ValueError
2734 raise ValueError
2702 return n
2735 return n
2703 except ValueError:
2736 except ValueError:
2704 raise error.InputError(
2737 raise error.InputError(
2705 b'changeset references must be full hexadecimal '
2738 b'changeset references must be full hexadecimal '
2706 b'node identifiers'
2739 b'node identifiers'
2707 )
2740 )
2708
2741
2709 if opts.get(b'delete'):
2742 if opts.get(b'delete'):
2710 indices = []
2743 indices = []
2711 for v in opts.get(b'delete'):
2744 for v in opts.get(b'delete'):
2712 try:
2745 try:
2713 indices.append(int(v))
2746 indices.append(int(v))
2714 except ValueError:
2747 except ValueError:
2715 raise error.InputError(
2748 raise error.InputError(
2716 _(b'invalid index value: %r') % v,
2749 _(b'invalid index value: %r') % v,
2717 hint=_(b'use integers for indices'),
2750 hint=_(b'use integers for indices'),
2718 )
2751 )
2719
2752
2720 if repo.currenttransaction():
2753 if repo.currenttransaction():
2721 raise error.Abort(
2754 raise error.Abort(
2722 _(b'cannot delete obsmarkers in the middle of a transaction.')
2755 _(b'cannot delete obsmarkers in the middle of a transaction.')
2723 )
2756 )
2724
2757
2725 with repo.lock():
2758 with repo.lock():
2726 n = repair.deleteobsmarkers(repo.obsstore, indices)
2759 n = repair.deleteobsmarkers(repo.obsstore, indices)
2727 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2760 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2728
2761
2729 return
2762 return
2730
2763
2731 if precursor is not None:
2764 if precursor is not None:
2732 if opts[b'rev']:
2765 if opts[b'rev']:
2733 raise error.InputError(
2766 raise error.InputError(
2734 b'cannot select revision when creating marker'
2767 b'cannot select revision when creating marker'
2735 )
2768 )
2736 metadata = {}
2769 metadata = {}
2737 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2770 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2738 succs = tuple(parsenodeid(succ) for succ in successors)
2771 succs = tuple(parsenodeid(succ) for succ in successors)
2739 l = repo.lock()
2772 l = repo.lock()
2740 try:
2773 try:
2741 tr = repo.transaction(b'debugobsolete')
2774 tr = repo.transaction(b'debugobsolete')
2742 try:
2775 try:
2743 date = opts.get(b'date')
2776 date = opts.get(b'date')
2744 if date:
2777 if date:
2745 date = dateutil.parsedate(date)
2778 date = dateutil.parsedate(date)
2746 else:
2779 else:
2747 date = None
2780 date = None
2748 prec = parsenodeid(precursor)
2781 prec = parsenodeid(precursor)
2749 parents = None
2782 parents = None
2750 if opts[b'record_parents']:
2783 if opts[b'record_parents']:
2751 if prec not in repo.unfiltered():
2784 if prec not in repo.unfiltered():
2752 raise error.Abort(
2785 raise error.Abort(
2753 b'cannot use --record-parents on '
2786 b'cannot use --record-parents on '
2754 b'unknown changesets'
2787 b'unknown changesets'
2755 )
2788 )
2756 parents = repo.unfiltered()[prec].parents()
2789 parents = repo.unfiltered()[prec].parents()
2757 parents = tuple(p.node() for p in parents)
2790 parents = tuple(p.node() for p in parents)
2758 repo.obsstore.create(
2791 repo.obsstore.create(
2759 tr,
2792 tr,
2760 prec,
2793 prec,
2761 succs,
2794 succs,
2762 opts[b'flags'],
2795 opts[b'flags'],
2763 parents=parents,
2796 parents=parents,
2764 date=date,
2797 date=date,
2765 metadata=metadata,
2798 metadata=metadata,
2766 ui=ui,
2799 ui=ui,
2767 )
2800 )
2768 tr.close()
2801 tr.close()
2769 except ValueError as exc:
2802 except ValueError as exc:
2770 raise error.Abort(
2803 raise error.Abort(
2771 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2804 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2772 )
2805 )
2773 finally:
2806 finally:
2774 tr.release()
2807 tr.release()
2775 finally:
2808 finally:
2776 l.release()
2809 l.release()
2777 else:
2810 else:
2778 if opts[b'rev']:
2811 if opts[b'rev']:
2779 revs = logcmdutil.revrange(repo, opts[b'rev'])
2812 revs = logcmdutil.revrange(repo, opts[b'rev'])
2780 nodes = [repo[r].node() for r in revs]
2813 nodes = [repo[r].node() for r in revs]
2781 markers = list(
2814 markers = list(
2782 obsutil.getmarkers(
2815 obsutil.getmarkers(
2783 repo, nodes=nodes, exclusive=opts[b'exclusive']
2816 repo, nodes=nodes, exclusive=opts[b'exclusive']
2784 )
2817 )
2785 )
2818 )
2786 markers.sort(key=lambda x: x._data)
2819 markers.sort(key=lambda x: x._data)
2787 else:
2820 else:
2788 markers = obsutil.getmarkers(repo)
2821 markers = obsutil.getmarkers(repo)
2789
2822
2790 markerstoiter = markers
2823 markerstoiter = markers
2791 isrelevant = lambda m: True
2824 isrelevant = lambda m: True
2792 if opts.get(b'rev') and opts.get(b'index'):
2825 if opts.get(b'rev') and opts.get(b'index'):
2793 markerstoiter = obsutil.getmarkers(repo)
2826 markerstoiter = obsutil.getmarkers(repo)
2794 markerset = set(markers)
2827 markerset = set(markers)
2795 isrelevant = lambda m: m in markerset
2828 isrelevant = lambda m: m in markerset
2796
2829
2797 fm = ui.formatter(b'debugobsolete', opts)
2830 fm = ui.formatter(b'debugobsolete', opts)
2798 for i, m in enumerate(markerstoiter):
2831 for i, m in enumerate(markerstoiter):
2799 if not isrelevant(m):
2832 if not isrelevant(m):
2800 # marker can be irrelevant when we're iterating over a set
2833 # marker can be irrelevant when we're iterating over a set
2801 # of markers (markerstoiter) which is bigger than the set
2834 # of markers (markerstoiter) which is bigger than the set
2802 # of markers we want to display (markers)
2835 # of markers we want to display (markers)
2803 # this can happen if both --index and --rev options are
2836 # this can happen if both --index and --rev options are
2804 # provided and thus we need to iterate over all of the markers
2837 # provided and thus we need to iterate over all of the markers
2805 # to get the correct indices, but only display the ones that
2838 # to get the correct indices, but only display the ones that
2806 # are relevant to --rev value
2839 # are relevant to --rev value
2807 continue
2840 continue
2808 fm.startitem()
2841 fm.startitem()
2809 ind = i if opts.get(b'index') else None
2842 ind = i if opts.get(b'index') else None
2810 cmdutil.showmarker(fm, m, index=ind)
2843 cmdutil.showmarker(fm, m, index=ind)
2811 fm.end()
2844 fm.end()
2812
2845
2813
2846
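# Illustrative sketch: the validation parsenodeid performs above, using only
# binascii. A full node id is 40 hex digits (20 bytes) for sha1 repositories;
# binascii.Error is a ValueError subclass, so callers can catch ValueError
# exactly as the command does.
def _sketch_parse_full_node(s, nodelen=20):
    """Return the binary node for a full-length hex id, else raise ValueError."""
    import binascii

    n = binascii.unhexlify(s)
    if len(n) != nodelen:
        raise ValueError(
            "changeset references must be full hexadecimal node identifiers"
        )
    return n
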
2814 @command(
2847 @command(
2815 b'debugp1copies',
2848 b'debugp1copies',
2816 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2849 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2817 _(b'[-r REV]'),
2850 _(b'[-r REV]'),
2818 )
2851 )
2819 def debugp1copies(ui, repo, **opts):
2852 def debugp1copies(ui, repo, **opts):
2820 """dump copy information compared to p1"""
2853 """dump copy information compared to p1"""
2821
2854
2822 opts = pycompat.byteskwargs(opts)
2855 opts = pycompat.byteskwargs(opts)
2823 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2856 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2824 for dst, src in ctx.p1copies().items():
2857 for dst, src in ctx.p1copies().items():
2825 ui.write(b'%s -> %s\n' % (src, dst))
2858 ui.write(b'%s -> %s\n' % (src, dst))
2826
2859
2827
2860
2828 @command(
2861 @command(
2829 b'debugp2copies',
2862 b'debugp2copies',
2830 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2863 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2831 _(b'[-r REV]'),
2864 _(b'[-r REV]'),
2832 )
2865 )
2833 def debugp2copies(ui, repo, **opts):
2866 def debugp2copies(ui, repo, **opts):
2834 """dump copy information compared to p2"""
2867 """dump copy information compared to p2"""
2835
2868
2836 opts = pycompat.byteskwargs(opts)
2869 opts = pycompat.byteskwargs(opts)
2837 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2870 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2838 for dst, src in ctx.p2copies().items():
2871 for dst, src in ctx.p2copies().items():
2839 ui.write(b'%s -> %s\n' % (src, dst))
2872 ui.write(b'%s -> %s\n' % (src, dst))
2840
2873
2841
2874
2842 @command(
2875 @command(
2843 b'debugpathcomplete',
2876 b'debugpathcomplete',
2844 [
2877 [
2845 (b'f', b'full', None, _(b'complete an entire path')),
2878 (b'f', b'full', None, _(b'complete an entire path')),
2846 (b'n', b'normal', None, _(b'show only normal files')),
2879 (b'n', b'normal', None, _(b'show only normal files')),
2847 (b'a', b'added', None, _(b'show only added files')),
2880 (b'a', b'added', None, _(b'show only added files')),
2848 (b'r', b'removed', None, _(b'show only removed files')),
2881 (b'r', b'removed', None, _(b'show only removed files')),
2849 ],
2882 ],
2850 _(b'FILESPEC...'),
2883 _(b'FILESPEC...'),
2851 )
2884 )
2852 def debugpathcomplete(ui, repo, *specs, **opts):
2885 def debugpathcomplete(ui, repo, *specs, **opts):
2853 """complete part or all of a tracked path
2886 """complete part or all of a tracked path
2854
2887
2855 This command supports shells that offer path name completion. It
2888 This command supports shells that offer path name completion. It
2856 currently completes only files already known to the dirstate.
2889 currently completes only files already known to the dirstate.
2857
2890
2858 Completion extends only to the next path segment unless
2891 Completion extends only to the next path segment unless
2859 --full is specified, in which case entire paths are used."""
2892 --full is specified, in which case entire paths are used."""
2860
2893
2861 def complete(path, acceptable):
2894 def complete(path, acceptable):
2862 dirstate = repo.dirstate
2895 dirstate = repo.dirstate
2863 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2896 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2864 rootdir = repo.root + pycompat.ossep
2897 rootdir = repo.root + pycompat.ossep
2865 if spec != repo.root and not spec.startswith(rootdir):
2898 if spec != repo.root and not spec.startswith(rootdir):
2866 return [], []
2899 return [], []
2867 if os.path.isdir(spec):
2900 if os.path.isdir(spec):
2868 spec += b'/'
2901 spec += b'/'
2869 spec = spec[len(rootdir) :]
2902 spec = spec[len(rootdir) :]
2870 fixpaths = pycompat.ossep != b'/'
2903 fixpaths = pycompat.ossep != b'/'
2871 if fixpaths:
2904 if fixpaths:
2872 spec = spec.replace(pycompat.ossep, b'/')
2905 spec = spec.replace(pycompat.ossep, b'/')
2873 speclen = len(spec)
2906 speclen = len(spec)
2874 fullpaths = opts['full']
2907 fullpaths = opts['full']
2875 files, dirs = set(), set()
2908 files, dirs = set(), set()
2876 adddir, addfile = dirs.add, files.add
2909 adddir, addfile = dirs.add, files.add
2877 for f, st in dirstate.items():
2910 for f, st in dirstate.items():
2878 if f.startswith(spec) and st.state in acceptable:
2911 if f.startswith(spec) and st.state in acceptable:
2879 if fixpaths:
2912 if fixpaths:
2880 f = f.replace(b'/', pycompat.ossep)
2913 f = f.replace(b'/', pycompat.ossep)
2881 if fullpaths:
2914 if fullpaths:
2882 addfile(f)
2915 addfile(f)
2883 continue
2916 continue
2884 s = f.find(pycompat.ossep, speclen)
2917 s = f.find(pycompat.ossep, speclen)
2885 if s >= 0:
2918 if s >= 0:
2886 adddir(f[:s])
2919 adddir(f[:s])
2887 else:
2920 else:
2888 addfile(f)
2921 addfile(f)
2889 return files, dirs
2922 return files, dirs
2890
2923
2891 acceptable = b''
2924 acceptable = b''
2892 if opts['normal']:
2925 if opts['normal']:
2893 acceptable += b'nm'
2926 acceptable += b'nm'
2894 if opts['added']:
2927 if opts['added']:
2895 acceptable += b'a'
2928 acceptable += b'a'
2896 if opts['removed']:
2929 if opts['removed']:
2897 acceptable += b'r'
2930 acceptable += b'r'
2898 cwd = repo.getcwd()
2931 cwd = repo.getcwd()
2899 if not specs:
2932 if not specs:
2900 specs = [b'.']
2933 specs = [b'.']
2901
2934
2902 files, dirs = set(), set()
2935 files, dirs = set(), set()
2903 for spec in specs:
2936 for spec in specs:
2904 f, d = complete(spec, acceptable or b'nmar')
2937 f, d = complete(spec, acceptable or b'nmar')
2905 files.update(f)
2938 files.update(f)
2906 dirs.update(d)
2939 dirs.update(d)
2907 files.update(dirs)
2940 files.update(dirs)
2908 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2941 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2909 ui.write(b'\n')
2942 ui.write(b'\n')
2910
2943
2911
2944
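# Illustrative invocations (a sketch; 'sub/' is a placeholder path, not one
# taken from this repository):
#
#   $ hg debugpathcomplete sub/
#   $ hg debugpathcomplete --full sub/
#
# The first form prints the next path segment of every matching tracked
# path; --full prints entire paths, and --normal/-a/-r restrict the result
# to normal, added, or removed dirstate entries.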
2912 @command(
2945 @command(
2913 b'debugpathcopies',
2946 b'debugpathcopies',
2914 cmdutil.walkopts,
2947 cmdutil.walkopts,
2915 b'hg debugpathcopies REV1 REV2 [FILE]',
2948 b'hg debugpathcopies REV1 REV2 [FILE]',
2916 inferrepo=True,
2949 inferrepo=True,
2917 )
2950 )
2918 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2951 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2919 """show copies between two revisions"""
2952 """show copies between two revisions"""
2920 ctx1 = scmutil.revsingle(repo, rev1)
2953 ctx1 = scmutil.revsingle(repo, rev1)
2921 ctx2 = scmutil.revsingle(repo, rev2)
2954 ctx2 = scmutil.revsingle(repo, rev2)
2922 m = scmutil.match(ctx1, pats, opts)
2955 m = scmutil.match(ctx1, pats, opts)
2923 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2956 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2924 ui.write(b'%s -> %s\n' % (src, dst))
2957 ui.write(b'%s -> %s\n' % (src, dst))
2925
2958
2926
2959
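# Illustrative invocation (a sketch; the revisions are placeholders):
#
#   $ hg debugpathcopies 0 tip
#
# prints one 'source -> destination' line per copy detected between the two
# revisions, optionally narrowed by FILE patterns and the usual -I/-X walk
# options.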
2927 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2960 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2928 def debugpeer(ui, path):
2961 def debugpeer(ui, path):
2929 """establish a connection to a peer repository"""
2962 """establish a connection to a peer repository"""
2930 # Always enable peer request logging. Requires --debug to display
2963 # Always enable peer request logging. Requires --debug to display
2931 # though.
2964 # though.
2932 overrides = {
2965 overrides = {
2933 (b'devel', b'debug.peer-request'): True,
2966 (b'devel', b'debug.peer-request'): True,
2934 }
2967 }
2935
2968
2936 with ui.configoverride(overrides):
2969 with ui.configoverride(overrides):
2937 peer = hg.peer(ui, {}, path)
2970 peer = hg.peer(ui, {}, path)
2938
2971
2939 try:
2972 try:
2940 local = peer.local() is not None
2973 local = peer.local() is not None
2941 canpush = peer.canpush()
2974 canpush = peer.canpush()
2942
2975
2943 ui.write(_(b'url: %s\n') % peer.url())
2976 ui.write(_(b'url: %s\n') % peer.url())
2944 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2977 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2945 ui.write(
2978 ui.write(
2946 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2979 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2947 )
2980 )
2948 finally:
2981 finally:
2949 peer.close()
2982 peer.close()
2950
2983
2951
2984
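# Illustrative invocation (a sketch; the URL and the reported values are
# placeholders and depend on the peer):
#
#   $ hg debugpeer https://example.com/repo
#   url: https://example.com/repo
#   local: no
#   pushable: yes
#
# With --debug, the peer-request logging enabled above is also displayed.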
2952 @command(
2985 @command(
2953 b'debugpickmergetool',
2986 b'debugpickmergetool',
2954 [
2987 [
2955 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2988 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2956 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2989 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2957 ]
2990 ]
2958 + cmdutil.walkopts
2991 + cmdutil.walkopts
2959 + cmdutil.mergetoolopts,
2992 + cmdutil.mergetoolopts,
2960 _(b'[PATTERN]...'),
2993 _(b'[PATTERN]...'),
2961 inferrepo=True,
2994 inferrepo=True,
2962 )
2995 )
2963 def debugpickmergetool(ui, repo, *pats, **opts):
2996 def debugpickmergetool(ui, repo, *pats, **opts):
2964 """examine which merge tool is chosen for specified file
2997 """examine which merge tool is chosen for specified file
2965
2998
2966     As described in :hg:`help merge-tools`, Mercurial examines the
2999     As described in :hg:`help merge-tools`, Mercurial examines the
2967     configurations below, in this order, to decide which merge tool is
3000     configurations below, in this order, to decide which merge tool is
2968     chosen for the specified file.
3001     chosen for the specified file.
2969
3002
2970 1. ``--tool`` option
3003 1. ``--tool`` option
2971 2. ``HGMERGE`` environment variable
3004 2. ``HGMERGE`` environment variable
2972 3. configurations in ``merge-patterns`` section
3005 3. configurations in ``merge-patterns`` section
2973 4. configuration of ``ui.merge``
3006 4. configuration of ``ui.merge``
2974 5. configurations in ``merge-tools`` section
3007 5. configurations in ``merge-tools`` section
2975     6. ``hgmerge`` tool (for historical reasons only)
3008     6. ``hgmerge`` tool (for historical reasons only)
2976 7. default tool for fallback (``:merge`` or ``:prompt``)
3009 7. default tool for fallback (``:merge`` or ``:prompt``)
2977
3010
2978     This command writes out the examination result in the style below::
3011     This command writes out the examination result in the style below::
2979
3012
2980 FILE = MERGETOOL
3013 FILE = MERGETOOL
2981
3014
2982 By default, all files known in the first parent context of the
3015 By default, all files known in the first parent context of the
2983 working directory are examined. Use file patterns and/or -I/-X
3016 working directory are examined. Use file patterns and/or -I/-X
2984 options to limit target files. -r/--rev is also useful to examine
3017 options to limit target files. -r/--rev is also useful to examine
2985     files in another context without actually updating to it.
3018     files in another context without actually updating to it.
2986
3019
2987     With --debug, this command shows warning messages while matching
3020     With --debug, this command shows warning messages while matching
2988     against ``merge-patterns`` and so on. It is recommended to use
3021     against ``merge-patterns`` and so on. It is recommended to use
2989     this option with explicit file patterns and/or -I/-X options,
3022     this option with explicit file patterns and/or -I/-X options,
2990     because this option increases the amount of output per file according
3023     because this option increases the amount of output per file according
2991     to the configuration in hgrc.
3024     to the configuration in hgrc.
2992
3025
2993     With -v/--verbose, this command first shows the configurations
3026     With -v/--verbose, this command first shows the configurations
2994     below (only if specified).
3027     below (only if specified).
2995
3028
2996 - ``--tool`` option
3029 - ``--tool`` option
2997 - ``HGMERGE`` environment variable
3030 - ``HGMERGE`` environment variable
2998 - configuration of ``ui.merge``
3031 - configuration of ``ui.merge``
2999
3032
3000     If the merge tool is chosen before matching against
3033     If the merge tool is chosen before matching against
3001     ``merge-patterns``, this command can't show any helpful
3034     ``merge-patterns``, this command can't show any helpful
3002     information, even with --debug. In such a case, the information above
3035     information, even with --debug. In such a case, the information above
3003     helps explain why a merge tool was chosen.
3036     helps explain why a merge tool was chosen.
3004 """
3037 """
3005 opts = pycompat.byteskwargs(opts)
3038 opts = pycompat.byteskwargs(opts)
3006 overrides = {}
3039 overrides = {}
3007 if opts[b'tool']:
3040 if opts[b'tool']:
3008 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3041 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3009 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3042 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3010
3043
3011 with ui.configoverride(overrides, b'debugmergepatterns'):
3044 with ui.configoverride(overrides, b'debugmergepatterns'):
3012 hgmerge = encoding.environ.get(b"HGMERGE")
3045 hgmerge = encoding.environ.get(b"HGMERGE")
3013 if hgmerge is not None:
3046 if hgmerge is not None:
3014 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3047 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3015 uimerge = ui.config(b"ui", b"merge")
3048 uimerge = ui.config(b"ui", b"merge")
3016 if uimerge:
3049 if uimerge:
3017 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3050 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3018
3051
3019 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3052 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3020 m = scmutil.match(ctx, pats, opts)
3053 m = scmutil.match(ctx, pats, opts)
3021 changedelete = opts[b'changedelete']
3054 changedelete = opts[b'changedelete']
3022 for path in ctx.walk(m):
3055 for path in ctx.walk(m):
3023 fctx = ctx[path]
3056 fctx = ctx[path]
3024 with ui.silent(
3057 with ui.silent(
3025 error=True
3058 error=True
3026 ) if not ui.debugflag else util.nullcontextmanager():
3059 ) if not ui.debugflag else util.nullcontextmanager():
3027 tool, toolpath = filemerge._picktool(
3060 tool, toolpath = filemerge._picktool(
3028 repo,
3061 repo,
3029 ui,
3062 ui,
3030 path,
3063 path,
3031 fctx.isbinary(),
3064 fctx.isbinary(),
3032 b'l' in fctx.flags(),
3065 b'l' in fctx.flags(),
3033 changedelete,
3066 changedelete,
3034 )
3067 )
3035 ui.write(b'%s = %s\n' % (path, tool))
3068 ui.write(b'%s = %s\n' % (path, tool))
3036
3069
3037
3070
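# Illustrative invocation (a sketch; the file name and the tool shown are
# placeholders and depend on the local configuration):
#
#   $ hg debugpickmergetool -r . foo.c
#   foo.c = :merge3
#
# Each output line follows the 'FILE = MERGETOOL' form described in the
# docstring above.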
3038 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3071 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3039 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3072 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3040 """access the pushkey key/value protocol
3073 """access the pushkey key/value protocol
3041
3074
3042 With two args, list the keys in the given namespace.
3075 With two args, list the keys in the given namespace.
3043
3076
3044 With five args, set a key to new if it currently is set to old.
3077 With five args, set a key to new if it currently is set to old.
3045 Reports success or failure.
3078 Reports success or failure.
3046 """
3079 """
3047
3080
3048 target = hg.peer(ui, {}, repopath)
3081 target = hg.peer(ui, {}, repopath)
3049 try:
3082 try:
3050 if keyinfo:
3083 if keyinfo:
3051 key, old, new = keyinfo
3084 key, old, new = keyinfo
3052 with target.commandexecutor() as e:
3085 with target.commandexecutor() as e:
3053 r = e.callcommand(
3086 r = e.callcommand(
3054 b'pushkey',
3087 b'pushkey',
3055 {
3088 {
3056 b'namespace': namespace,
3089 b'namespace': namespace,
3057 b'key': key,
3090 b'key': key,
3058 b'old': old,
3091 b'old': old,
3059 b'new': new,
3092 b'new': new,
3060 },
3093 },
3061 ).result()
3094 ).result()
3062
3095
3063 ui.status(pycompat.bytestr(r) + b'\n')
3096 ui.status(pycompat.bytestr(r) + b'\n')
3064 return not r
3097 return not r
3065 else:
3098 else:
3066 for k, v in sorted(target.listkeys(namespace).items()):
3099 for k, v in sorted(target.listkeys(namespace).items()):
3067 ui.write(
3100 ui.write(
3068 b"%s\t%s\n"
3101 b"%s\t%s\n"
3069 % (stringutil.escapestr(k), stringutil.escapestr(v))
3102 % (stringutil.escapestr(k), stringutil.escapestr(v))
3070 )
3103 )
3071 finally:
3104 finally:
3072 target.close()
3105 target.close()
3073
3106
3074
3107
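# Illustrative invocations (sketches; the repository path, key, and values
# are placeholders):
#
#   $ hg debugpushkey ../remote bookmarks
#   $ hg debugpushkey ../remote bookmarks mybook '' NEWNODE
#
# The two-argument form prints one 'key<TAB>value' line per key; the
# five-argument form reports the pushkey result and exits non-zero on
# failure.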
3075 @command(b'debugpvec', [], _(b'A B'))
3108 @command(b'debugpvec', [], _(b'A B'))
3076 def debugpvec(ui, repo, a, b=None):
3109 def debugpvec(ui, repo, a, b=None):
3077 ca = scmutil.revsingle(repo, a)
3110 ca = scmutil.revsingle(repo, a)
3078 cb = scmutil.revsingle(repo, b)
3111 cb = scmutil.revsingle(repo, b)
3079 pa = pvec.ctxpvec(ca)
3112 pa = pvec.ctxpvec(ca)
3080 pb = pvec.ctxpvec(cb)
3113 pb = pvec.ctxpvec(cb)
3081 if pa == pb:
3114 if pa == pb:
3082 rel = b"="
3115 rel = b"="
3083 elif pa > pb:
3116 elif pa > pb:
3084 rel = b">"
3117 rel = b">"
3085 elif pa < pb:
3118 elif pa < pb:
3086 rel = b"<"
3119 rel = b"<"
3087 elif pa | pb:
3120 elif pa | pb:
3088 rel = b"|"
3121 rel = b"|"
3089 ui.write(_(b"a: %s\n") % pa)
3122 ui.write(_(b"a: %s\n") % pa)
3090 ui.write(_(b"b: %s\n") % pb)
3123 ui.write(_(b"b: %s\n") % pb)
3091 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3124 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3092 ui.write(
3125 ui.write(
3093 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3126 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3094 % (
3127 % (
3095 abs(pa._depth - pb._depth),
3128 abs(pa._depth - pb._depth),
3096 pvec._hamming(pa._vec, pb._vec),
3129 pvec._hamming(pa._vec, pb._vec),
3097 pa.distance(pb),
3130 pa.distance(pb),
3098 rel,
3131 rel,
3099 )
3132 )
3100 )
3133 )
3101
3134
3102
3135
3103 @command(
3136 @command(
3104 b'debugrebuilddirstate|debugrebuildstate',
3137 b'debugrebuilddirstate|debugrebuildstate',
3105 [
3138 [
3106 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3139 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3107 (
3140 (
3108 b'',
3141 b'',
3109 b'minimal',
3142 b'minimal',
3110 None,
3143 None,
3111 _(
3144 _(
3112 b'only rebuild files that are inconsistent with '
3145 b'only rebuild files that are inconsistent with '
3113 b'the working copy parent'
3146 b'the working copy parent'
3114 ),
3147 ),
3115 ),
3148 ),
3116 ],
3149 ],
3117 _(b'[-r REV]'),
3150 _(b'[-r REV]'),
3118 )
3151 )
3119 def debugrebuilddirstate(ui, repo, rev, **opts):
3152 def debugrebuilddirstate(ui, repo, rev, **opts):
3120 """rebuild the dirstate as it would look like for the given revision
3153 """rebuild the dirstate as it would look like for the given revision
3121
3154
3122     If no revision is specified, the current first parent will be used.
3155     If no revision is specified, the current first parent will be used.
3123
3156
3124 The dirstate will be set to the files of the given revision.
3157 The dirstate will be set to the files of the given revision.
3125 The actual working directory content or existing dirstate
3158 The actual working directory content or existing dirstate
3126 information such as adds or removes is not considered.
3159 information such as adds or removes is not considered.
3127
3160
3128 ``minimal`` will only rebuild the dirstate status for files that claim to be
3161 ``minimal`` will only rebuild the dirstate status for files that claim to be
3129 tracked but are not in the parent manifest, or that exist in the parent
3162 tracked but are not in the parent manifest, or that exist in the parent
3130 manifest but are not in the dirstate. It will not change adds, removes, or
3163 manifest but are not in the dirstate. It will not change adds, removes, or
3131 modified files that are in the working copy parent.
3164 modified files that are in the working copy parent.
3132
3165
3133 One use of this command is to make the next :hg:`status` invocation
3166 One use of this command is to make the next :hg:`status` invocation
3134 check the actual file content.
3167 check the actual file content.
3135 """
3168 """
3136 ctx = scmutil.revsingle(repo, rev)
3169 ctx = scmutil.revsingle(repo, rev)
3137 with repo.wlock():
3170 with repo.wlock():
3138 dirstate = repo.dirstate
3171 dirstate = repo.dirstate
3139 changedfiles = None
3172 changedfiles = None
3140 # See command doc for what minimal does.
3173 # See command doc for what minimal does.
3141 if opts.get('minimal'):
3174 if opts.get('minimal'):
3142 manifestfiles = set(ctx.manifest().keys())
3175 manifestfiles = set(ctx.manifest().keys())
3143 dirstatefiles = set(dirstate)
3176 dirstatefiles = set(dirstate)
3144 manifestonly = manifestfiles - dirstatefiles
3177 manifestonly = manifestfiles - dirstatefiles
3145 dsonly = dirstatefiles - manifestfiles
3178 dsonly = dirstatefiles - manifestfiles
3146 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3179 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3147 changedfiles = manifestonly | dsnotadded
3180 changedfiles = manifestonly | dsnotadded
3148
3181
3149 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3182 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3150
3183
3151
3184
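# Illustrative invocations (sketches):
#
#   $ hg debugrebuilddirstate            # rebuild from the working copy parent
#   $ hg debugrebuilddirstate -r .^      # rebuild from another revision
#   $ hg debugrebuilddirstate --minimal  # only fix inconsistent entries
#
# A follow-up `hg status` then re-checks the actual file contents, as noted
# in the docstring above.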
3152 @command(
3185 @command(
3153 b'debugrebuildfncache',
3186 b'debugrebuildfncache',
3154 [
3187 [
3155 (
3188 (
3156 b'',
3189 b'',
3157 b'only-data',
3190 b'only-data',
3158 False,
3191 False,
3159 _(b'only look for wrong .d files (much faster)'),
3192 _(b'only look for wrong .d files (much faster)'),
3160 )
3193 )
3161 ],
3194 ],
3162 b'',
3195 b'',
3163 )
3196 )
3164 def debugrebuildfncache(ui, repo, **opts):
3197 def debugrebuildfncache(ui, repo, **opts):
3165 """rebuild the fncache file"""
3198 """rebuild the fncache file"""
3166 opts = pycompat.byteskwargs(opts)
3199 opts = pycompat.byteskwargs(opts)
3167 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3200 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3168
3201
3169
3202
3170 @command(
3203 @command(
3171 b'debugrename',
3204 b'debugrename',
3172 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3205 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3173 _(b'[-r REV] [FILE]...'),
3206 _(b'[-r REV] [FILE]...'),
3174 )
3207 )
3175 def debugrename(ui, repo, *pats, **opts):
3208 def debugrename(ui, repo, *pats, **opts):
3176 """dump rename information"""
3209 """dump rename information"""
3177
3210
3178 opts = pycompat.byteskwargs(opts)
3211 opts = pycompat.byteskwargs(opts)
3179 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3212 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3180 m = scmutil.match(ctx, pats, opts)
3213 m = scmutil.match(ctx, pats, opts)
3181 for abs in ctx.walk(m):
3214 for abs in ctx.walk(m):
3182 fctx = ctx[abs]
3215 fctx = ctx[abs]
3183 o = fctx.filelog().renamed(fctx.filenode())
3216 o = fctx.filelog().renamed(fctx.filenode())
3184 rel = repo.pathto(abs)
3217 rel = repo.pathto(abs)
3185 if o:
3218 if o:
3186 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3219 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3187 else:
3220 else:
3188 ui.write(_(b"%s not renamed\n") % rel)
3221 ui.write(_(b"%s not renamed\n") % rel)
3189
3222
3190
3223
3191 @command(b'debugrequires|debugrequirements', [], b'')
3224 @command(b'debugrequires|debugrequirements', [], b'')
3192 def debugrequirements(ui, repo):
3225 def debugrequirements(ui, repo):
3193 """print the current repo requirements"""
3226 """print the current repo requirements"""
3194 for r in sorted(repo.requirements):
3227 for r in sorted(repo.requirements):
3195 ui.write(b"%s\n" % r)
3228 ui.write(b"%s\n" % r)
3196
3229
3197
3230
3198 @command(
3231 @command(
3199 b'debugrevlog',
3232 b'debugrevlog',
3200 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3233 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3201 _(b'-c|-m|FILE'),
3234 _(b'-c|-m|FILE'),
3202 optionalrepo=True,
3235 optionalrepo=True,
3203 )
3236 )
3204 def debugrevlog(ui, repo, file_=None, **opts):
3237 def debugrevlog(ui, repo, file_=None, **opts):
3205 """show data and statistics about a revlog"""
3238 """show data and statistics about a revlog"""
3206 opts = pycompat.byteskwargs(opts)
3239 opts = pycompat.byteskwargs(opts)
3207 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3240 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3208
3241
3209 if opts.get(b"dump"):
3242 if opts.get(b"dump"):
3210 numrevs = len(r)
3243 numrevs = len(r)
3211 ui.write(
3244 ui.write(
3212 (
3245 (
3213 b"# rev p1rev p2rev start end deltastart base p1 p2"
3246 b"# rev p1rev p2rev start end deltastart base p1 p2"
3214 b" rawsize totalsize compression heads chainlen\n"
3247 b" rawsize totalsize compression heads chainlen\n"
3215 )
3248 )
3216 )
3249 )
3217 ts = 0
3250 ts = 0
3218 heads = set()
3251 heads = set()
3219
3252
3220 for rev in range(numrevs):
3253 for rev in range(numrevs):
3221 dbase = r.deltaparent(rev)
3254 dbase = r.deltaparent(rev)
3222 if dbase == -1:
3255 if dbase == -1:
3223 dbase = rev
3256 dbase = rev
3224 cbase = r.chainbase(rev)
3257 cbase = r.chainbase(rev)
3225 clen = r.chainlen(rev)
3258 clen = r.chainlen(rev)
3226 p1, p2 = r.parentrevs(rev)
3259 p1, p2 = r.parentrevs(rev)
3227 rs = r.rawsize(rev)
3260 rs = r.rawsize(rev)
3228 ts = ts + rs
3261 ts = ts + rs
3229 heads -= set(r.parentrevs(rev))
3262 heads -= set(r.parentrevs(rev))
3230 heads.add(rev)
3263 heads.add(rev)
3231 try:
3264 try:
3232 compression = ts / r.end(rev)
3265 compression = ts / r.end(rev)
3233 except ZeroDivisionError:
3266 except ZeroDivisionError:
3234 compression = 0
3267 compression = 0
3235 ui.write(
3268 ui.write(
3236 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3269 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3237 b"%11d %5d %8d\n"
3270 b"%11d %5d %8d\n"
3238 % (
3271 % (
3239 rev,
3272 rev,
3240 p1,
3273 p1,
3241 p2,
3274 p2,
3242 r.start(rev),
3275 r.start(rev),
3243 r.end(rev),
3276 r.end(rev),
3244 r.start(dbase),
3277 r.start(dbase),
3245 r.start(cbase),
3278 r.start(cbase),
3246 r.start(p1),
3279 r.start(p1),
3247 r.start(p2),
3280 r.start(p2),
3248 rs,
3281 rs,
3249 ts,
3282 ts,
3250 compression,
3283 compression,
3251 len(heads),
3284 len(heads),
3252 clen,
3285 clen,
3253 )
3286 )
3254 )
3287 )
3255 return 0
3288 return 0
3256
3289
3257 format = r._format_version
3290 format = r._format_version
3258 v = r._format_flags
3291 v = r._format_flags
3259 flags = []
3292 flags = []
3260 gdelta = False
3293 gdelta = False
3261 if v & revlog.FLAG_INLINE_DATA:
3294 if v & revlog.FLAG_INLINE_DATA:
3262 flags.append(b'inline')
3295 flags.append(b'inline')
3263 if v & revlog.FLAG_GENERALDELTA:
3296 if v & revlog.FLAG_GENERALDELTA:
3264 gdelta = True
3297 gdelta = True
3265 flags.append(b'generaldelta')
3298 flags.append(b'generaldelta')
3266 if not flags:
3299 if not flags:
3267 flags = [b'(none)']
3300 flags = [b'(none)']
3268
3301
3269 ### tracks merge vs single parent
3302 ### tracks merge vs single parent
3270 nummerges = 0
3303 nummerges = 0
3271
3304
3272     ### tracks the ways the "delta" is built
3305     ### tracks the ways the "delta" is built
3273 # nodelta
3306 # nodelta
3274 numempty = 0
3307 numempty = 0
3275 numemptytext = 0
3308 numemptytext = 0
3276 numemptydelta = 0
3309 numemptydelta = 0
3277 # full file content
3310 # full file content
3278 numfull = 0
3311 numfull = 0
3279 # intermediate snapshot against a prior snapshot
3312 # intermediate snapshot against a prior snapshot
3280 numsemi = 0
3313 numsemi = 0
3281 # snapshot count per depth
3314 # snapshot count per depth
3282 numsnapdepth = collections.defaultdict(lambda: 0)
3315 numsnapdepth = collections.defaultdict(lambda: 0)
3283 # delta against previous revision
3316 # delta against previous revision
3284 numprev = 0
3317 numprev = 0
3285 # delta against first or second parent (not prev)
3318 # delta against first or second parent (not prev)
3286 nump1 = 0
3319 nump1 = 0
3287 nump2 = 0
3320 nump2 = 0
3288 # delta against neither prev nor parents
3321 # delta against neither prev nor parents
3289 numother = 0
3322 numother = 0
3290 # delta against prev that are also first or second parent
3323 # delta against prev that are also first or second parent
3291 # (details of `numprev`)
3324 # (details of `numprev`)
3292 nump1prev = 0
3325 nump1prev = 0
3293 nump2prev = 0
3326 nump2prev = 0
3294
3327
3295     # data about the delta chain of each rev
3328     # data about the delta chain of each rev
3296 chainlengths = []
3329 chainlengths = []
3297 chainbases = []
3330 chainbases = []
3298 chainspans = []
3331 chainspans = []
3299
3332
3300 # data about each revision
3333 # data about each revision
3301 datasize = [None, 0, 0]
3334 datasize = [None, 0, 0]
3302 fullsize = [None, 0, 0]
3335 fullsize = [None, 0, 0]
3303 semisize = [None, 0, 0]
3336 semisize = [None, 0, 0]
3304 # snapshot count per depth
3337 # snapshot count per depth
3305 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3338 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3306 deltasize = [None, 0, 0]
3339 deltasize = [None, 0, 0]
3307 chunktypecounts = {}
3340 chunktypecounts = {}
3308 chunktypesizes = {}
3341 chunktypesizes = {}
3309
3342
3310 def addsize(size, l):
3343 def addsize(size, l):
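# `l` is a running [min, max, total] accumulator; the totals in slot 2 are
# later divided by the relevant revision counts to report averages.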
3311 if l[0] is None or size < l[0]:
3344 if l[0] is None or size < l[0]:
3312 l[0] = size
3345 l[0] = size
3313 if size > l[1]:
3346 if size > l[1]:
3314 l[1] = size
3347 l[1] = size
3315 l[2] += size
3348 l[2] += size
3316
3349
3317 numrevs = len(r)
3350 numrevs = len(r)
3318 for rev in range(numrevs):
3351 for rev in range(numrevs):
3319 p1, p2 = r.parentrevs(rev)
3352 p1, p2 = r.parentrevs(rev)
3320 delta = r.deltaparent(rev)
3353 delta = r.deltaparent(rev)
3321 if format > 0:
3354 if format > 0:
3322 addsize(r.rawsize(rev), datasize)
3355 addsize(r.rawsize(rev), datasize)
3323 if p2 != nullrev:
3356 if p2 != nullrev:
3324 nummerges += 1
3357 nummerges += 1
3325 size = r.length(rev)
3358 size = r.length(rev)
3326 if delta == nullrev:
3359 if delta == nullrev:
3327 chainlengths.append(0)
3360 chainlengths.append(0)
3328 chainbases.append(r.start(rev))
3361 chainbases.append(r.start(rev))
3329 chainspans.append(size)
3362 chainspans.append(size)
3330 if size == 0:
3363 if size == 0:
3331 numempty += 1
3364 numempty += 1
3332 numemptytext += 1
3365 numemptytext += 1
3333 else:
3366 else:
3334 numfull += 1
3367 numfull += 1
3335 numsnapdepth[0] += 1
3368 numsnapdepth[0] += 1
3336 addsize(size, fullsize)
3369 addsize(size, fullsize)
3337 addsize(size, snapsizedepth[0])
3370 addsize(size, snapsizedepth[0])
3338 else:
3371 else:
3339 chainlengths.append(chainlengths[delta] + 1)
3372 chainlengths.append(chainlengths[delta] + 1)
3340 baseaddr = chainbases[delta]
3373 baseaddr = chainbases[delta]
3341 revaddr = r.start(rev)
3374 revaddr = r.start(rev)
3342 chainbases.append(baseaddr)
3375 chainbases.append(baseaddr)
3343 chainspans.append((revaddr - baseaddr) + size)
3376 chainspans.append((revaddr - baseaddr) + size)
3344 if size == 0:
3377 if size == 0:
3345 numempty += 1
3378 numempty += 1
3346 numemptydelta += 1
3379 numemptydelta += 1
3347 elif r.issnapshot(rev):
3380 elif r.issnapshot(rev):
3348 addsize(size, semisize)
3381 addsize(size, semisize)
3349 numsemi += 1
3382 numsemi += 1
3350 depth = r.snapshotdepth(rev)
3383 depth = r.snapshotdepth(rev)
3351 numsnapdepth[depth] += 1
3384 numsnapdepth[depth] += 1
3352 addsize(size, snapsizedepth[depth])
3385 addsize(size, snapsizedepth[depth])
3353 else:
3386 else:
3354 addsize(size, deltasize)
3387 addsize(size, deltasize)
3355 if delta == rev - 1:
3388 if delta == rev - 1:
3356 numprev += 1
3389 numprev += 1
3357 if delta == p1:
3390 if delta == p1:
3358 nump1prev += 1
3391 nump1prev += 1
3359 elif delta == p2:
3392 elif delta == p2:
3360 nump2prev += 1
3393 nump2prev += 1
3361 elif delta == p1:
3394 elif delta == p1:
3362 nump1 += 1
3395 nump1 += 1
3363 elif delta == p2:
3396 elif delta == p2:
3364 nump2 += 1
3397 nump2 += 1
3365 elif delta != nullrev:
3398 elif delta != nullrev:
3366 numother += 1
3399 numother += 1
3367
3400
3368 # Obtain data on the raw chunks in the revlog.
3401 # Obtain data on the raw chunks in the revlog.
3369 if util.safehasattr(r, b'_getsegmentforrevs'):
3402 if util.safehasattr(r, b'_getsegmentforrevs'):
3370 segment = r._getsegmentforrevs(rev, rev)[1]
3403 segment = r._getsegmentforrevs(rev, rev)[1]
3371 else:
3404 else:
3372 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3405 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3373 if segment:
3406 if segment:
3374 chunktype = bytes(segment[0:1])
3407 chunktype = bytes(segment[0:1])
3375 else:
3408 else:
3376 chunktype = b'empty'
3409 chunktype = b'empty'
3377
3410
3378 if chunktype not in chunktypecounts:
3411 if chunktype not in chunktypecounts:
3379 chunktypecounts[chunktype] = 0
3412 chunktypecounts[chunktype] = 0
3380 chunktypesizes[chunktype] = 0
3413 chunktypesizes[chunktype] = 0
3381
3414
3382 chunktypecounts[chunktype] += 1
3415 chunktypecounts[chunktype] += 1
3383 chunktypesizes[chunktype] += size
3416 chunktypesizes[chunktype] += size
3384
3417
3385 # Adjust size min value for empty cases
3418 # Adjust size min value for empty cases
3386 for size in (datasize, fullsize, semisize, deltasize):
3419 for size in (datasize, fullsize, semisize, deltasize):
3387 if size[0] is None:
3420 if size[0] is None:
3388 size[0] = 0
3421 size[0] = 0
3389
3422
3390 numdeltas = numrevs - numfull - numempty - numsemi
3423 numdeltas = numrevs - numfull - numempty - numsemi
3391 numoprev = numprev - nump1prev - nump2prev
3424 numoprev = numprev - nump1prev - nump2prev
3392 totalrawsize = datasize[2]
3425 totalrawsize = datasize[2]
3393 datasize[2] /= numrevs
3426 datasize[2] /= numrevs
3394 fulltotal = fullsize[2]
3427 fulltotal = fullsize[2]
3395 if numfull == 0:
3428 if numfull == 0:
3396 fullsize[2] = 0
3429 fullsize[2] = 0
3397 else:
3430 else:
3398 fullsize[2] /= numfull
3431 fullsize[2] /= numfull
3399 semitotal = semisize[2]
3432 semitotal = semisize[2]
3400 snaptotal = {}
3433 snaptotal = {}
3401 if numsemi > 0:
3434 if numsemi > 0:
3402 semisize[2] /= numsemi
3435 semisize[2] /= numsemi
3403 for depth in snapsizedepth:
3436 for depth in snapsizedepth:
3404 snaptotal[depth] = snapsizedepth[depth][2]
3437 snaptotal[depth] = snapsizedepth[depth][2]
3405 snapsizedepth[depth][2] /= numsnapdepth[depth]
3438 snapsizedepth[depth][2] /= numsnapdepth[depth]
3406
3439
3407 deltatotal = deltasize[2]
3440 deltatotal = deltasize[2]
3408 if numdeltas > 0:
3441 if numdeltas > 0:
3409 deltasize[2] /= numdeltas
3442 deltasize[2] /= numdeltas
3410 totalsize = fulltotal + semitotal + deltatotal
3443 totalsize = fulltotal + semitotal + deltatotal
3411 avgchainlen = sum(chainlengths) / numrevs
3444 avgchainlen = sum(chainlengths) / numrevs
3412 maxchainlen = max(chainlengths)
3445 maxchainlen = max(chainlengths)
3413 maxchainspan = max(chainspans)
3446 maxchainspan = max(chainspans)
3414 compratio = 1
3447 compratio = 1
3415 if totalsize:
3448 if totalsize:
3416 compratio = totalrawsize / totalsize
3449 compratio = totalrawsize / totalsize
3417
3450
3418 basedfmtstr = b'%%%dd\n'
3451 basedfmtstr = b'%%%dd\n'
3419 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3452 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3420
3453
3421 def dfmtstr(max):
3454 def dfmtstr(max):
3422 return basedfmtstr % len(str(max))
3455 return basedfmtstr % len(str(max))
3423
3456
3424 def pcfmtstr(max, padding=0):
3457 def pcfmtstr(max, padding=0):
3425 return basepcfmtstr % (len(str(max)), b' ' * padding)
3458 return basepcfmtstr % (len(str(max)), b' ' * padding)
3426
3459
3427 def pcfmt(value, total):
3460 def pcfmt(value, total):
3428 if total:
3461 if total:
3429 return (value, 100 * float(value) / total)
3462 return (value, 100 * float(value) / total)
3430 else:
3463 else:
3431 return value, 100.0
3464 return value, 100.0
3432
3465
3433 ui.writenoi18n(b'format : %d\n' % format)
3466 ui.writenoi18n(b'format : %d\n' % format)
3434 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3467 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3435
3468
3436 ui.write(b'\n')
3469 ui.write(b'\n')
3437 fmt = pcfmtstr(totalsize)
3470 fmt = pcfmtstr(totalsize)
3438 fmt2 = dfmtstr(totalsize)
3471 fmt2 = dfmtstr(totalsize)
3439 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3472 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3440 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3473 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3441 ui.writenoi18n(
3474 ui.writenoi18n(
3442 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3475 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3443 )
3476 )
3444 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3477 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3445 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3478 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3446 ui.writenoi18n(
3479 ui.writenoi18n(
3447 b' text : '
3480 b' text : '
3448 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3481 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3449 )
3482 )
3450 ui.writenoi18n(
3483 ui.writenoi18n(
3451 b' delta : '
3484 b' delta : '
3452 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3485 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3453 )
3486 )
3454 ui.writenoi18n(
3487 ui.writenoi18n(
3455 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3488 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3456 )
3489 )
3457 for depth in sorted(numsnapdepth):
3490 for depth in sorted(numsnapdepth):
3458 ui.write(
3491 ui.write(
3459 (b' lvl-%-3d : ' % depth)
3492 (b' lvl-%-3d : ' % depth)
3460 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3493 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3461 )
3494 )
3462 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3495 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3463 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3496 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3464 ui.writenoi18n(
3497 ui.writenoi18n(
3465 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3498 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3466 )
3499 )
3467 for depth in sorted(numsnapdepth):
3500 for depth in sorted(numsnapdepth):
3468 ui.write(
3501 ui.write(
3469 (b' lvl-%-3d : ' % depth)
3502 (b' lvl-%-3d : ' % depth)
3470 + fmt % pcfmt(snaptotal[depth], totalsize)
3503 + fmt % pcfmt(snaptotal[depth], totalsize)
3471 )
3504 )
3472 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3505 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3473
3506
3474 def fmtchunktype(chunktype):
3507 def fmtchunktype(chunktype):
3475 if chunktype == b'empty':
3508 if chunktype == b'empty':
3476 return b' %s : ' % chunktype
3509 return b' %s : ' % chunktype
3477 elif chunktype in pycompat.bytestr(string.ascii_letters):
3510 elif chunktype in pycompat.bytestr(string.ascii_letters):
3478 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3511 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3479 else:
3512 else:
3480 return b' 0x%s : ' % hex(chunktype)
3513 return b' 0x%s : ' % hex(chunktype)
3481
3514
3482 ui.write(b'\n')
3515 ui.write(b'\n')
3483 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3516 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3484 for chunktype in sorted(chunktypecounts):
3517 for chunktype in sorted(chunktypecounts):
3485 ui.write(fmtchunktype(chunktype))
3518 ui.write(fmtchunktype(chunktype))
3486 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3519 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3487 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3520 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3488 for chunktype in sorted(chunktypecounts):
3521 for chunktype in sorted(chunktypecounts):
3489 ui.write(fmtchunktype(chunktype))
3522 ui.write(fmtchunktype(chunktype))
3490 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3523 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3491
3524
3492 ui.write(b'\n')
3525 ui.write(b'\n')
3493 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3526 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3494 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3527 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3495 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3528 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3496 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3529 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3497 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3530 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3498
3531
3499 if format > 0:
3532 if format > 0:
3500 ui.write(b'\n')
3533 ui.write(b'\n')
3501 ui.writenoi18n(
3534 ui.writenoi18n(
3502 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3535 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3503 % tuple(datasize)
3536 % tuple(datasize)
3504 )
3537 )
3505 ui.writenoi18n(
3538 ui.writenoi18n(
3506 b'full revision size (min/max/avg) : %d / %d / %d\n'
3539 b'full revision size (min/max/avg) : %d / %d / %d\n'
3507 % tuple(fullsize)
3540 % tuple(fullsize)
3508 )
3541 )
3509 ui.writenoi18n(
3542 ui.writenoi18n(
3510 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3543 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3511 % tuple(semisize)
3544 % tuple(semisize)
3512 )
3545 )
3513 for depth in sorted(snapsizedepth):
3546 for depth in sorted(snapsizedepth):
3514 if depth == 0:
3547 if depth == 0:
3515 continue
3548 continue
3516 ui.writenoi18n(
3549 ui.writenoi18n(
3517 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3550 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3518 % ((depth,) + tuple(snapsizedepth[depth]))
3551 % ((depth,) + tuple(snapsizedepth[depth]))
3519 )
3552 )
3520 ui.writenoi18n(
3553 ui.writenoi18n(
3521 b'delta size (min/max/avg) : %d / %d / %d\n'
3554 b'delta size (min/max/avg) : %d / %d / %d\n'
3522 % tuple(deltasize)
3555 % tuple(deltasize)
3523 )
3556 )
3524
3557
3525 if numdeltas > 0:
3558 if numdeltas > 0:
3526 ui.write(b'\n')
3559 ui.write(b'\n')
3527 fmt = pcfmtstr(numdeltas)
3560 fmt = pcfmtstr(numdeltas)
3528 fmt2 = pcfmtstr(numdeltas, 4)
3561 fmt2 = pcfmtstr(numdeltas, 4)
3529 ui.writenoi18n(
3562 ui.writenoi18n(
3530 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3563 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3531 )
3564 )
3532 if numprev > 0:
3565 if numprev > 0:
3533 ui.writenoi18n(
3566 ui.writenoi18n(
3534 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3567 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3535 )
3568 )
3536 ui.writenoi18n(
3569 ui.writenoi18n(
3537 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3570 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3538 )
3571 )
3539 ui.writenoi18n(
3572 ui.writenoi18n(
3540 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3573 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3541 )
3574 )
3542 if gdelta:
3575 if gdelta:
3543 ui.writenoi18n(
3576 ui.writenoi18n(
3544 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3577 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3545 )
3578 )
3546 ui.writenoi18n(
3579 ui.writenoi18n(
3547 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3580 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3548 )
3581 )
3549 ui.writenoi18n(
3582 ui.writenoi18n(
3550 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3583 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3551 )
3584 )
3552
3585
3553
3586
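# Illustrative invocations (sketches):
#
#   $ hg debugrevlog -m            # statistics for the manifest revlog
#   $ hg debugrevlog -c --dump     # raw per-revision index dump for the changelog
#
# The statistics output groups revisions into snapshots (full or
# intermediate) and deltas, and reports chain lengths and the compression
# ratio, as computed above.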
3554 @command(
3587 @command(
3555 b'debugrevlogindex',
3588 b'debugrevlogindex',
3556 cmdutil.debugrevlogopts
3589 cmdutil.debugrevlogopts
3557 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3590 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3558 _(b'[-f FORMAT] -c|-m|FILE'),
3591 _(b'[-f FORMAT] -c|-m|FILE'),
3559 optionalrepo=True,
3592 optionalrepo=True,
3560 )
3593 )
3561 def debugrevlogindex(ui, repo, file_=None, **opts):
3594 def debugrevlogindex(ui, repo, file_=None, **opts):
3562 """dump the contents of a revlog index"""
3595 """dump the contents of a revlog index"""
3563 opts = pycompat.byteskwargs(opts)
3596 opts = pycompat.byteskwargs(opts)
3564 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3597 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3565 format = opts.get(b'format', 0)
3598 format = opts.get(b'format', 0)
3566 if format not in (0, 1):
3599 if format not in (0, 1):
3567 raise error.Abort(_(b"unknown format %d") % format)
3600 raise error.Abort(_(b"unknown format %d") % format)
3568
3601
3569 if ui.debugflag:
3602 if ui.debugflag:
3570 shortfn = hex
3603 shortfn = hex
3571 else:
3604 else:
3572 shortfn = short
3605 shortfn = short
3573
3606
3574 # There might not be anything in r, so have a sane default
3607 # There might not be anything in r, so have a sane default
3575 idlen = 12
3608 idlen = 12
3576 for i in r:
3609 for i in r:
3577 idlen = len(shortfn(r.node(i)))
3610 idlen = len(shortfn(r.node(i)))
3578 break
3611 break
3579
3612
3580 if format == 0:
3613 if format == 0:
3581 if ui.verbose:
3614 if ui.verbose:
3582 ui.writenoi18n(
3615 ui.writenoi18n(
3583 b" rev offset length linkrev %s %s p2\n"
3616 b" rev offset length linkrev %s %s p2\n"
3584 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3617 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3585 )
3618 )
3586 else:
3619 else:
3587 ui.writenoi18n(
3620 ui.writenoi18n(
3588 b" rev linkrev %s %s p2\n"
3621 b" rev linkrev %s %s p2\n"
3589 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3622 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3590 )
3623 )
3591 elif format == 1:
3624 elif format == 1:
3592 if ui.verbose:
3625 if ui.verbose:
3593 ui.writenoi18n(
3626 ui.writenoi18n(
3594 (
3627 (
3595 b" rev flag offset length size link p1"
3628 b" rev flag offset length size link p1"
3596 b" p2 %s\n"
3629 b" p2 %s\n"
3597 )
3630 )
3598 % b"nodeid".rjust(idlen)
3631 % b"nodeid".rjust(idlen)
3599 )
3632 )
3600 else:
3633 else:
3601 ui.writenoi18n(
3634 ui.writenoi18n(
3602 b" rev flag size link p1 p2 %s\n"
3635 b" rev flag size link p1 p2 %s\n"
3603 % b"nodeid".rjust(idlen)
3636 % b"nodeid".rjust(idlen)
3604 )
3637 )
3605
3638
3606 for i in r:
3639 for i in r:
3607 node = r.node(i)
3640 node = r.node(i)
3608 if format == 0:
3641 if format == 0:
3609 try:
3642 try:
3610 pp = r.parents(node)
3643 pp = r.parents(node)
3611 except Exception:
3644 except Exception:
3612 pp = [repo.nullid, repo.nullid]
3645 pp = [repo.nullid, repo.nullid]
3613 if ui.verbose:
3646 if ui.verbose:
3614 ui.write(
3647 ui.write(
3615 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3648 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3616 % (
3649 % (
3617 i,
3650 i,
3618 r.start(i),
3651 r.start(i),
3619 r.length(i),
3652 r.length(i),
3620 r.linkrev(i),
3653 r.linkrev(i),
3621 shortfn(node),
3654 shortfn(node),
3622 shortfn(pp[0]),
3655 shortfn(pp[0]),
3623 shortfn(pp[1]),
3656 shortfn(pp[1]),
3624 )
3657 )
3625 )
3658 )
3626 else:
3659 else:
3627 ui.write(
3660 ui.write(
3628 b"% 6d % 7d %s %s %s\n"
3661 b"% 6d % 7d %s %s %s\n"
3629 % (
3662 % (
3630 i,
3663 i,
3631 r.linkrev(i),
3664 r.linkrev(i),
3632 shortfn(node),
3665 shortfn(node),
3633 shortfn(pp[0]),
3666 shortfn(pp[0]),
3634 shortfn(pp[1]),
3667 shortfn(pp[1]),
3635 )
3668 )
3636 )
3669 )
3637 elif format == 1:
3670 elif format == 1:
3638 pr = r.parentrevs(i)
3671 pr = r.parentrevs(i)
3639 if ui.verbose:
3672 if ui.verbose:
3640 ui.write(
3673 ui.write(
3641 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3674 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3642 % (
3675 % (
3643 i,
3676 i,
3644 r.flags(i),
3677 r.flags(i),
3645 r.start(i),
3678 r.start(i),
3646 r.length(i),
3679 r.length(i),
3647 r.rawsize(i),
3680 r.rawsize(i),
3648 r.linkrev(i),
3681 r.linkrev(i),
3649 pr[0],
3682 pr[0],
3650 pr[1],
3683 pr[1],
3651 shortfn(node),
3684 shortfn(node),
3652 )
3685 )
3653 )
3686 )
3654 else:
3687 else:
3655 ui.write(
3688 ui.write(
3656 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3689 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3657 % (
3690 % (
3658 i,
3691 i,
3659 r.flags(i),
3692 r.flags(i),
3660 r.rawsize(i),
3693 r.rawsize(i),
3661 r.linkrev(i),
3694 r.linkrev(i),
3662 pr[0],
3695 pr[0],
3663 pr[1],
3696 pr[1],
3664 shortfn(node),
3697 shortfn(node),
3665 )
3698 )
3666 )
3699 )
3667
3700
3668
3701
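# Illustrative invocation (a sketch; output rows elided):
#
#   $ hg debugrevlogindex -c
#      rev linkrev nodeid       p1           p2
#      ...
#
# -f 1 selects the wider format with flag and size columns, and --verbose
# adds offset/length information, matching the header strings above.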
3669 @command(
3702 @command(
3670 b'debugrevspec',
3703 b'debugrevspec',
3671 [
3704 [
3672 (
3705 (
3673 b'',
3706 b'',
3674 b'optimize',
3707 b'optimize',
3675 None,
3708 None,
3676 _(b'print parsed tree after optimizing (DEPRECATED)'),
3709 _(b'print parsed tree after optimizing (DEPRECATED)'),
3677 ),
3710 ),
3678 (
3711 (
3679 b'',
3712 b'',
3680 b'show-revs',
3713 b'show-revs',
3681 True,
3714 True,
3682 _(b'print list of result revisions (default)'),
3715 _(b'print list of result revisions (default)'),
3683 ),
3716 ),
3684 (
3717 (
3685 b's',
3718 b's',
3686 b'show-set',
3719 b'show-set',
3687 None,
3720 None,
3688 _(b'print internal representation of result set'),
3721 _(b'print internal representation of result set'),
3689 ),
3722 ),
3690 (
3723 (
3691 b'p',
3724 b'p',
3692 b'show-stage',
3725 b'show-stage',
3693 [],
3726 [],
3694 _(b'print parsed tree at the given stage'),
3727 _(b'print parsed tree at the given stage'),
3695 _(b'NAME'),
3728 _(b'NAME'),
3696 ),
3729 ),
3697 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3730 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3698 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3731 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3699 ],
3732 ],
3700 b'REVSPEC',
3733 b'REVSPEC',
3701 )
3734 )
3702 def debugrevspec(ui, repo, expr, **opts):
3735 def debugrevspec(ui, repo, expr, **opts):
3703 """parse and apply a revision specification
3736 """parse and apply a revision specification
3704
3737
3705     Use the -p/--show-stage option to print the parsed tree at the given stages.
3738     Use the -p/--show-stage option to print the parsed tree at the given stages.
3706     Use -p all to print the tree at every stage.
3739     Use -p all to print the tree at every stage.
3707
3740
3708     Use the --no-show-revs option with -s or -p to print only the set
3741     Use the --no-show-revs option with -s or -p to print only the set
3709     representation or the parsed tree, respectively.
3742     representation or the parsed tree, respectively.
3710
3743
3711 Use --verify-optimized to compare the optimized result with the unoptimized
3744 Use --verify-optimized to compare the optimized result with the unoptimized
3712 one. Returns 1 if the optimized result differs.
3745 one. Returns 1 if the optimized result differs.
3713 """
3746 """
3714 opts = pycompat.byteskwargs(opts)
3747 opts = pycompat.byteskwargs(opts)
3715 aliases = ui.configitems(b'revsetalias')
3748 aliases = ui.configitems(b'revsetalias')
3716 stages = [
3749 stages = [
3717 (b'parsed', lambda tree: tree),
3750 (b'parsed', lambda tree: tree),
3718 (
3751 (
3719 b'expanded',
3752 b'expanded',
3720 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3753 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3721 ),
3754 ),
3722 (b'concatenated', revsetlang.foldconcat),
3755 (b'concatenated', revsetlang.foldconcat),
3723 (b'analyzed', revsetlang.analyze),
3756 (b'analyzed', revsetlang.analyze),
3724 (b'optimized', revsetlang.optimize),
3757 (b'optimized', revsetlang.optimize),
3725 ]
3758 ]
3726 if opts[b'no_optimized']:
3759 if opts[b'no_optimized']:
3727 stages = stages[:-1]
3760 stages = stages[:-1]
3728 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3761 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3729 raise error.Abort(
3762 raise error.Abort(
3730 _(b'cannot use --verify-optimized with --no-optimized')
3763 _(b'cannot use --verify-optimized with --no-optimized')
3731 )
3764 )
3732 stagenames = {n for n, f in stages}
3765 stagenames = {n for n, f in stages}
3733
3766
3734 showalways = set()
3767 showalways = set()
3735 showchanged = set()
3768 showchanged = set()
3736 if ui.verbose and not opts[b'show_stage']:
3769 if ui.verbose and not opts[b'show_stage']:
3737 # show parsed tree by --verbose (deprecated)
3770 # show parsed tree by --verbose (deprecated)
3738 showalways.add(b'parsed')
3771 showalways.add(b'parsed')
3739 showchanged.update([b'expanded', b'concatenated'])
3772 showchanged.update([b'expanded', b'concatenated'])
3740 if opts[b'optimize']:
3773 if opts[b'optimize']:
3741 showalways.add(b'optimized')
3774 showalways.add(b'optimized')
3742 if opts[b'show_stage'] and opts[b'optimize']:
3775 if opts[b'show_stage'] and opts[b'optimize']:
3743 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3776 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3744 if opts[b'show_stage'] == [b'all']:
3777 if opts[b'show_stage'] == [b'all']:
3745 showalways.update(stagenames)
3778 showalways.update(stagenames)
3746 else:
3779 else:
3747 for n in opts[b'show_stage']:
3780 for n in opts[b'show_stage']:
3748 if n not in stagenames:
3781 if n not in stagenames:
3749 raise error.Abort(_(b'invalid stage name: %s') % n)
3782 raise error.Abort(_(b'invalid stage name: %s') % n)
3750 showalways.update(opts[b'show_stage'])
3783 showalways.update(opts[b'show_stage'])
3751
3784
3752 treebystage = {}
3785 treebystage = {}
3753 printedtree = None
3786 printedtree = None
3754 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3787 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3755 for n, f in stages:
3788 for n, f in stages:
3756 treebystage[n] = tree = f(tree)
3789 treebystage[n] = tree = f(tree)
3757 if n in showalways or (n in showchanged and tree != printedtree):
3790 if n in showalways or (n in showchanged and tree != printedtree):
3758 if opts[b'show_stage'] or n != b'parsed':
3791 if opts[b'show_stage'] or n != b'parsed':
3759 ui.write(b"* %s:\n" % n)
3792 ui.write(b"* %s:\n" % n)
3760 ui.write(revsetlang.prettyformat(tree), b"\n")
3793 ui.write(revsetlang.prettyformat(tree), b"\n")
3761 printedtree = tree
3794 printedtree = tree
3762
3795
3763 if opts[b'verify_optimized']:
3796 if opts[b'verify_optimized']:
3764 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3797 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3765 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3798 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3766 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3799 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3767 ui.writenoi18n(
3800 ui.writenoi18n(
3768 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3801 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3769 )
3802 )
3770 ui.writenoi18n(
3803 ui.writenoi18n(
3771 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3804 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3772 )
3805 )
3773 arevs = list(arevs)
3806 arevs = list(arevs)
3774 brevs = list(brevs)
3807 brevs = list(brevs)
3775 if arevs == brevs:
3808 if arevs == brevs:
3776 return 0
3809 return 0
3777 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3810 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3778 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3811 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3779 sm = difflib.SequenceMatcher(None, arevs, brevs)
3812 sm = difflib.SequenceMatcher(None, arevs, brevs)
3780 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3813 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3781 if tag in ('delete', 'replace'):
3814 if tag in ('delete', 'replace'):
3782 for c in arevs[alo:ahi]:
3815 for c in arevs[alo:ahi]:
3783 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3816 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3784 if tag in ('insert', 'replace'):
3817 if tag in ('insert', 'replace'):
3785 for c in brevs[blo:bhi]:
3818 for c in brevs[blo:bhi]:
3786 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3819 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3787 if tag == 'equal':
3820 if tag == 'equal':
3788 for c in arevs[alo:ahi]:
3821 for c in arevs[alo:ahi]:
3789 ui.write(b' %d\n' % c)
3822 ui.write(b' %d\n' % c)
3790 return 1
3823 return 1
3791
3824
3792 func = revset.makematcher(tree)
3825 func = revset.makematcher(tree)
3793 revs = func(repo)
3826 revs = func(repo)
3794 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3827 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3795 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3828 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3796 if not opts[b'show_revs']:
3829 if not opts[b'show_revs']:
3797 return
3830 return
3798 for c in revs:
3831 for c in revs:
3799 ui.write(b"%d\n" % c)
3832 ui.write(b"%d\n" % c)
3800
3833
3801
3834
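# Illustrative invocations (sketches; the revsets are placeholders):
#
#   $ hg debugrevspec 'heads(all())'
#   $ hg debugrevspec -p all 'parents(tip)'
#   $ hg debugrevspec --no-show-revs -s tip
#
# The stage names accepted by -p are the ones defined in `stages` above:
# parsed, expanded, concatenated, analyzed, optimized (plus 'all').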
3802 @command(
3835 @command(
3803 b'debugserve',
3836 b'debugserve',
3804 [
3837 [
3805 (
3838 (
3806 b'',
3839 b'',
3807 b'sshstdio',
3840 b'sshstdio',
3808 False,
3841 False,
3809 _(b'run an SSH server bound to process handles'),
3842 _(b'run an SSH server bound to process handles'),
3810 ),
3843 ),
3811 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3844 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3812 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3845 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3813 ],
3846 ],
3814 b'',
3847 b'',
3815 )
3848 )
3816 def debugserve(ui, repo, **opts):
3849 def debugserve(ui, repo, **opts):
3817 """run a server with advanced settings
3850 """run a server with advanced settings
3818
3851
3819 This command is similar to :hg:`serve`. It exists partially as a
3852 This command is similar to :hg:`serve`. It exists partially as a
3820     workaround for the fact that ``hg serve --stdio`` must have specific
3853     workaround for the fact that ``hg serve --stdio`` must have specific
3821 arguments for security reasons.
3854 arguments for security reasons.
3822 """
3855 """
3823 opts = pycompat.byteskwargs(opts)
3856 opts = pycompat.byteskwargs(opts)
3824
3857
3825 if not opts[b'sshstdio']:
3858 if not opts[b'sshstdio']:
3826 raise error.Abort(_(b'only --sshstdio is currently supported'))
3859 raise error.Abort(_(b'only --sshstdio is currently supported'))
3827
3860
3828 logfh = None
3861 logfh = None
3829
3862
3830 if opts[b'logiofd'] and opts[b'logiofile']:
3863 if opts[b'logiofd'] and opts[b'logiofile']:
3831 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3864 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3832
3865
3833 if opts[b'logiofd']:
3866 if opts[b'logiofd']:
3834 # Ideally we would be line buffered. But line buffering in binary
3867 # Ideally we would be line buffered. But line buffering in binary
3835 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3868 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3836 # buffering could have performance impacts. But since this isn't
3869 # buffering could have performance impacts. But since this isn't
3837 # performance critical code, it should be fine.
3870 # performance critical code, it should be fine.
3838 try:
3871 try:
3839 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3872 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3840 except OSError as e:
3873 except OSError as e:
3841 if e.errno != errno.ESPIPE:
3874 if e.errno != errno.ESPIPE:
3842 raise
3875 raise
3843 # can't seek a pipe, so `ab` mode fails on py3
3876 # can't seek a pipe, so `ab` mode fails on py3
3844 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3877 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3845 elif opts[b'logiofile']:
3878 elif opts[b'logiofile']:
3846 logfh = open(opts[b'logiofile'], b'ab', 0)
3879 logfh = open(opts[b'logiofile'], b'ab', 0)
3847
3880
3848 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3881 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3849 s.serve_forever()
3882 s.serve_forever()
3850
3883
3851
3884
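# Illustrative invocation (a sketch; the log path is a placeholder):
#
#   $ hg debugserve --sshstdio --logiofile /tmp/hg-io.log
#
# runs an SSH wire-protocol server over the current process's stdio and logs
# the server I/O to the given file; --logiofd accepts an already-open file
# descriptor instead.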
3852 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3885 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3853 def debugsetparents(ui, repo, rev1, rev2=None):
3886 def debugsetparents(ui, repo, rev1, rev2=None):
3854 """manually set the parents of the current working directory (DANGEROUS)
3887 """manually set the parents of the current working directory (DANGEROUS)
3855
3888
3856     This command is not what you are looking for and should not be used. Using
3889     This command is not what you are looking for and should not be used. Using
3857     this command will most certainly result in slight corruption of the
3890     this command will most certainly result in slight corruption of the
3858     file-level histories within your repository. DO NOT USE THIS COMMAND.
3891     file-level histories within your repository. DO NOT USE THIS COMMAND.
3859
3892
3860     The command updates the p1 and p2 fields in the dirstate without touching
3893     The command updates the p1 and p2 fields in the dirstate without touching
3861     anything else. This is useful for writing repository conversion tools, but
3894     anything else. This is useful for writing repository conversion tools, but
3862     it should be used with extreme care. For example, neither the working
3895     it should be used with extreme care. For example, neither the working
3863     directory nor the dirstate is updated, so file status may be incorrect
3896     directory nor the dirstate is updated, so file status may be incorrect
3864     after running this command. Only use it if you are one of the few people who
3897     after running this command. Only use it if you are one of the few people who
3865     deeply understand both conversion tools and file-level histories. If you are
3898     deeply understand both conversion tools and file-level histories. If you are
3866     reading this help, you are not one of those people (most of them sailed west
3899     reading this help, you are not one of those people (most of them sailed west
3867     from Mithlond anyway).
3900     from Mithlond anyway).
3868
3901
3869 So one last time DO NOT USE THIS COMMAND.
3902 So one last time DO NOT USE THIS COMMAND.
3870
3903
3871 Returns 0 on success.
3904 Returns 0 on success.
3872 """
3905 """
3873
3906
3874 node1 = scmutil.revsingle(repo, rev1).node()
3907 node1 = scmutil.revsingle(repo, rev1).node()
3875 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3908 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3876
3909
3877 with repo.wlock():
3910 with repo.wlock():
3878 repo.setparents(node1, node2)
3911 repo.setparents(node1, node2)
3879
3912
3880
3913
3881 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3914 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3882 def debugsidedata(ui, repo, file_, rev=None, **opts):
3915 def debugsidedata(ui, repo, file_, rev=None, **opts):
3883 """dump the side data for a cl/manifest/file revision
3916 """dump the side data for a cl/manifest/file revision
3884
3917
3885 Use --verbose to dump the sidedata content."""
3918 Use --verbose to dump the sidedata content."""
3886 opts = pycompat.byteskwargs(opts)
3919 opts = pycompat.byteskwargs(opts)
3887 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3920 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3888 if rev is not None:
3921 if rev is not None:
3889 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3922 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3890 file_, rev = None, file_
3923 file_, rev = None, file_
3891 elif rev is None:
3924 elif rev is None:
3892 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3925 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3893 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3926 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3894 r = getattr(r, '_revlog', r)
3927 r = getattr(r, '_revlog', r)
3895 try:
3928 try:
3896 sidedata = r.sidedata(r.lookup(rev))
3929 sidedata = r.sidedata(r.lookup(rev))
3897 except KeyError:
3930 except KeyError:
3898 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3931 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3899 if sidedata:
3932 if sidedata:
3900 sidedata = list(sidedata.items())
3933 sidedata = list(sidedata.items())
3901 sidedata.sort()
3934 sidedata.sort()
3902 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3935 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3903 for key, value in sidedata:
3936 for key, value in sidedata:
3904 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3937 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3905 if ui.verbose:
3938 if ui.verbose:
3906 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3939 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3907
3940
3908
3941
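# Hypothetical usage sketch (not part of this module): calling the command
# from a script, assuming `hg` is on PATH and the current directory is a
# repository.  `-c` selects the changelog, and --verbose also dumps the raw
# sidedata values, as documented above.
def _dump_changelog_sidedata(rev='0'):
    import subprocess

    return subprocess.run(
        ['hg', 'debugsidedata', '-c', rev, '--verbose'],
        capture_output=True,
        check=True,
    ).stdout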
3909 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3942 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3910 def debugssl(ui, repo, source=None, **opts):
3943 def debugssl(ui, repo, source=None, **opts):
3911 """test a secure connection to a server
3944 """test a secure connection to a server
3912
3945
3913 This builds the certificate chain for the server on Windows, installing the
3946 This builds the certificate chain for the server on Windows, installing the
3914 missing intermediates and trusted root via Windows Update if necessary. It
3947 missing intermediates and trusted root via Windows Update if necessary. It
3915 does nothing on other platforms.
3948 does nothing on other platforms.
3916
3949
3917 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3950 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3918 that server is used. See :hg:`help urls` for more information.
3951 that server is used. See :hg:`help urls` for more information.
3919
3952
3920 If the update succeeds, retry the original operation. Otherwise, the cause
3953 If the update succeeds, retry the original operation. Otherwise, the cause
3921 of the SSL error is likely another issue.
3954 of the SSL error is likely another issue.
3922 """
3955 """
3923 if not pycompat.iswindows:
3956 if not pycompat.iswindows:
3924 raise error.Abort(
3957 raise error.Abort(
3925 _(b'certificate chain building is only possible on Windows')
3958 _(b'certificate chain building is only possible on Windows')
3926 )
3959 )
3927
3960
3928 if not source:
3961 if not source:
3929 if not repo:
3962 if not repo:
3930 raise error.Abort(
3963 raise error.Abort(
3931 _(
3964 _(
3932 b"there is no Mercurial repository here, and no "
3965 b"there is no Mercurial repository here, and no "
3933 b"server specified"
3966 b"server specified"
3934 )
3967 )
3935 )
3968 )
3936 source = b"default"
3969 source = b"default"
3937
3970
3938 source, branches = urlutil.get_unique_pull_path(
3971 source, branches = urlutil.get_unique_pull_path(
3939 b'debugssl', repo, ui, source
3972 b'debugssl', repo, ui, source
3940 )
3973 )
3941 url = urlutil.url(source)
3974 url = urlutil.url(source)
3942
3975
3943 defaultport = {b'https': 443, b'ssh': 22}
3976 defaultport = {b'https': 443, b'ssh': 22}
3944 if url.scheme in defaultport:
3977 if url.scheme in defaultport:
3945 try:
3978 try:
3946 addr = (url.host, int(url.port or defaultport[url.scheme]))
3979 addr = (url.host, int(url.port or defaultport[url.scheme]))
3947 except ValueError:
3980 except ValueError:
3948 raise error.Abort(_(b"malformed port number in URL"))
3981 raise error.Abort(_(b"malformed port number in URL"))
3949 else:
3982 else:
3950 raise error.Abort(_(b"only https and ssh connections are supported"))
3983 raise error.Abort(_(b"only https and ssh connections are supported"))
3951
3984
3952 from . import win32
3985 from . import win32
3953
3986
3954 s = ssl.wrap_socket(
3987 s = ssl.wrap_socket(
3955 socket.socket(),
3988 socket.socket(),
3956 ssl_version=ssl.PROTOCOL_TLS,
3989 ssl_version=ssl.PROTOCOL_TLS,
3957 cert_reqs=ssl.CERT_NONE,
3990 cert_reqs=ssl.CERT_NONE,
3958 ca_certs=None,
3991 ca_certs=None,
3959 )
3992 )
3960
3993
3961 try:
3994 try:
3962 s.connect(addr)
3995 s.connect(addr)
3963 cert = s.getpeercert(True)
3996 cert = s.getpeercert(True)
3964
3997
3965 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3998 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3966
3999
3967 complete = win32.checkcertificatechain(cert, build=False)
4000 complete = win32.checkcertificatechain(cert, build=False)
3968
4001
3969 if not complete:
4002 if not complete:
3970 ui.status(_(b'certificate chain is incomplete, updating... '))
4003 ui.status(_(b'certificate chain is incomplete, updating... '))
3971
4004
3972 if not win32.checkcertificatechain(cert):
4005 if not win32.checkcertificatechain(cert):
3973 ui.status(_(b'failed.\n'))
4006 ui.status(_(b'failed.\n'))
3974 else:
4007 else:
3975 ui.status(_(b'done.\n'))
4008 ui.status(_(b'done.\n'))
3976 else:
4009 else:
3977 ui.status(_(b'full certificate chain is available\n'))
4010 ui.status(_(b'full certificate chain is available\n'))
3978 finally:
4011 finally:
3979 s.close()
4012 s.close()
3980
4013
3981
4014
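# Hypothetical sketch (not part of this module): the same "fetch the peer
# certificate without verifying it" step written against the ssl.SSLContext
# API (ssl.wrap_socket, used above, is deprecated in recent Python).  The
# Windows certificate-chain handling is not reproduced here.
def _fetch_peer_cert(host, port=443):
    import socket
    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    with socket.create_connection((host, port)) as sock:
        with ctx.wrap_socket(sock, server_hostname=host) as tls:
            return tls.getpeercert(True)  # DER bytes, as above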
3982 @command(
4015 @command(
3983 b"debugbackupbundle",
4016 b"debugbackupbundle",
3984 [
4017 [
3985 (
4018 (
3986 b"",
4019 b"",
3987 b"recover",
4020 b"recover",
3988 b"",
4021 b"",
3989 b"brings the specified changeset back into the repository",
4022 b"brings the specified changeset back into the repository",
3990 )
4023 )
3991 ]
4024 ]
3992 + cmdutil.logopts,
4025 + cmdutil.logopts,
3993 _(b"hg debugbackupbundle [--recover HASH]"),
4026 _(b"hg debugbackupbundle [--recover HASH]"),
3994 )
4027 )
3995 def debugbackupbundle(ui, repo, *pats, **opts):
4028 def debugbackupbundle(ui, repo, *pats, **opts):
3996 """lists the changesets available in backup bundles
4029 """lists the changesets available in backup bundles
3997
4030
3998 Without any arguments, this command prints a list of the changesets in each
4031 Without any arguments, this command prints a list of the changesets in each
3999 backup bundle.
4032 backup bundle.
4000
4033
4001 --recover takes a changeset hash and unbundles the first bundle that
4034 --recover takes a changeset hash and unbundles the first bundle that
4002 contains that hash, which puts that changeset back in your repository.
4035 contains that hash, which puts that changeset back in your repository.
4003
4036
4004 --verbose will print the entire commit message and the bundle path for that
4037 --verbose will print the entire commit message and the bundle path for that
4005 backup.
4038 backup.
4006 """
4039 """
4007 backups = list(
4040 backups = list(
4008 filter(
4041 filter(
4009 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
4042 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
4010 )
4043 )
4011 )
4044 )
4012 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
4045 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
4013
4046
4014 opts = pycompat.byteskwargs(opts)
4047 opts = pycompat.byteskwargs(opts)
4015 opts[b"bundle"] = b""
4048 opts[b"bundle"] = b""
4016 opts[b"force"] = None
4049 opts[b"force"] = None
4017 limit = logcmdutil.getlimit(opts)
4050 limit = logcmdutil.getlimit(opts)
4018
4051
4019 def display(other, chlist, displayer):
4052 def display(other, chlist, displayer):
4020 if opts.get(b"newest_first"):
4053 if opts.get(b"newest_first"):
4021 chlist.reverse()
4054 chlist.reverse()
4022 count = 0
4055 count = 0
4023 for n in chlist:
4056 for n in chlist:
4024 if limit is not None and count >= limit:
4057 if limit is not None and count >= limit:
4025 break
4058 break
4026 parents = [
4059 parents = [
4027 True for p in other.changelog.parents(n) if p != repo.nullid
4060 True for p in other.changelog.parents(n) if p != repo.nullid
4028 ]
4061 ]
4029 if opts.get(b"no_merges") and len(parents) == 2:
4062 if opts.get(b"no_merges") and len(parents) == 2:
4030 continue
4063 continue
4031 count += 1
4064 count += 1
4032 displayer.show(other[n])
4065 displayer.show(other[n])
4033
4066
4034 recovernode = opts.get(b"recover")
4067 recovernode = opts.get(b"recover")
4035 if recovernode:
4068 if recovernode:
4036 if scmutil.isrevsymbol(repo, recovernode):
4069 if scmutil.isrevsymbol(repo, recovernode):
4037 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
4070 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
4038 return
4071 return
4039 elif backups:
4072 elif backups:
4040 msg = _(
4073 msg = _(
4041 b"Recover changesets using: hg debugbackupbundle --recover "
4074 b"Recover changesets using: hg debugbackupbundle --recover "
4042 b"<changeset hash>\n\nAvailable backup changesets:"
4075 b"<changeset hash>\n\nAvailable backup changesets:"
4043 )
4076 )
4044 ui.status(msg, label=b"status.removed")
4077 ui.status(msg, label=b"status.removed")
4045 else:
4078 else:
4046 ui.status(_(b"no backup changesets found\n"))
4079 ui.status(_(b"no backup changesets found\n"))
4047 return
4080 return
4048
4081
4049 for backup in backups:
4082 for backup in backups:
4050 # Much of this is copied from the hg incoming logic
4083 # Much of this is copied from the hg incoming logic
4051 source = os.path.relpath(backup, encoding.getcwd())
4084 source = os.path.relpath(backup, encoding.getcwd())
4052 source, branches = urlutil.get_unique_pull_path(
4085 source, branches = urlutil.get_unique_pull_path(
4053 b'debugbackupbundle',
4086 b'debugbackupbundle',
4054 repo,
4087 repo,
4055 ui,
4088 ui,
4056 source,
4089 source,
4057 default_branches=opts.get(b'branch'),
4090 default_branches=opts.get(b'branch'),
4058 )
4091 )
4059 try:
4092 try:
4060 other = hg.peer(repo, opts, source)
4093 other = hg.peer(repo, opts, source)
4061 except error.LookupError as ex:
4094 except error.LookupError as ex:
4062 msg = _(b"\nwarning: unable to open bundle %s") % source
4095 msg = _(b"\nwarning: unable to open bundle %s") % source
4063 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
4096 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
4064 ui.warn(msg, hint=hint)
4097 ui.warn(msg, hint=hint)
4065 continue
4098 continue
4066 revs, checkout = hg.addbranchrevs(
4099 revs, checkout = hg.addbranchrevs(
4067 repo, other, branches, opts.get(b"rev")
4100 repo, other, branches, opts.get(b"rev")
4068 )
4101 )
4069
4102
4070 if revs:
4103 if revs:
4071 revs = [other.lookup(rev) for rev in revs]
4104 revs = [other.lookup(rev) for rev in revs]
4072
4105
4073 with ui.silent():
4106 with ui.silent():
4074 try:
4107 try:
4075 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4108 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4076 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4109 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4077 )
4110 )
4078 except error.LookupError:
4111 except error.LookupError:
4079 continue
4112 continue
4080
4113
4081 try:
4114 try:
4082 if not chlist:
4115 if not chlist:
4083 continue
4116 continue
4084 if recovernode:
4117 if recovernode:
4085 with repo.lock(), repo.transaction(b"unbundle") as tr:
4118 with repo.lock(), repo.transaction(b"unbundle") as tr:
4086 if scmutil.isrevsymbol(other, recovernode):
4119 if scmutil.isrevsymbol(other, recovernode):
4087 ui.status(_(b"Unbundling %s\n") % (recovernode))
4120 ui.status(_(b"Unbundling %s\n") % (recovernode))
4088 f = hg.openpath(ui, source)
4121 f = hg.openpath(ui, source)
4089 gen = exchange.readbundle(ui, f, source)
4122 gen = exchange.readbundle(ui, f, source)
4090 if isinstance(gen, bundle2.unbundle20):
4123 if isinstance(gen, bundle2.unbundle20):
4091 bundle2.applybundle(
4124 bundle2.applybundle(
4092 repo,
4125 repo,
4093 gen,
4126 gen,
4094 tr,
4127 tr,
4095 source=b"unbundle",
4128 source=b"unbundle",
4096 url=b"bundle:" + source,
4129 url=b"bundle:" + source,
4097 )
4130 )
4098 else:
4131 else:
4099 gen.apply(repo, b"unbundle", b"bundle:" + source)
4132 gen.apply(repo, b"unbundle", b"bundle:" + source)
4100 break
4133 break
4101 else:
4134 else:
4102 backupdate = encoding.strtolocal(
4135 backupdate = encoding.strtolocal(
4103 time.strftime(
4136 time.strftime(
4104 "%a %H:%M, %Y-%m-%d",
4137 "%a %H:%M, %Y-%m-%d",
4105 time.localtime(os.path.getmtime(source)),
4138 time.localtime(os.path.getmtime(source)),
4106 )
4139 )
4107 )
4140 )
4108 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4141 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4109 if ui.verbose:
4142 if ui.verbose:
4110 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4143 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4111 else:
4144 else:
4112 opts[
4145 opts[
4113 b"template"
4146 b"template"
4114 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4147 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4115 displayer = logcmdutil.changesetdisplayer(
4148 displayer = logcmdutil.changesetdisplayer(
4116 ui, other, opts, False
4149 ui, other, opts, False
4117 )
4150 )
4118 display(other, chlist, displayer)
4151 display(other, chlist, displayer)
4119 displayer.close()
4152 displayer.close()
4120 finally:
4153 finally:
4121 cleanupfn()
4154 cleanupfn()
4122
4155
4123
4156
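# Hypothetical sketch (not part of this module): the backup discovery above is
# essentially "glob .hg/strip-backup/*.hg and sort by mtime, newest first".
# A standalone version, assuming `repo_root` points at a working copy:
def _list_strip_backups(repo_root):
    import glob
    import os

    pattern = os.path.join(repo_root, '.hg', 'strip-backup', '*.hg')
    backups = [p for p in glob.glob(pattern) if os.path.isfile(p)]
    backups.sort(key=os.path.getmtime, reverse=True)
    return backups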
4124 @command(
4157 @command(
4125 b'debugsub',
4158 b'debugsub',
4126 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4159 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4127 _(b'[-r REV] [REV]'),
4160 _(b'[-r REV] [REV]'),
4128 )
4161 )
4129 def debugsub(ui, repo, rev=None):
4162 def debugsub(ui, repo, rev=None):
4130 ctx = scmutil.revsingle(repo, rev, None)
4163 ctx = scmutil.revsingle(repo, rev, None)
4131 for k, v in sorted(ctx.substate.items()):
4164 for k, v in sorted(ctx.substate.items()):
4132 ui.writenoi18n(b'path %s\n' % k)
4165 ui.writenoi18n(b'path %s\n' % k)
4133 ui.writenoi18n(b' source %s\n' % v[0])
4166 ui.writenoi18n(b' source %s\n' % v[0])
4134 ui.writenoi18n(b' revision %s\n' % v[1])
4167 ui.writenoi18n(b' revision %s\n' % v[1])
4135
4168
4136
4169
4137 @command(b'debugshell', optionalrepo=True)
4170 @command(b'debugshell', optionalrepo=True)
4138 def debugshell(ui, repo):
4171 def debugshell(ui, repo):
4139 """run an interactive Python interpreter
4172 """run an interactive Python interpreter
4140
4173
4141 The local namespace is provided with a reference to the ui and
4174 The local namespace is provided with a reference to the ui and
4142 the repo instance (if available).
4175 the repo instance (if available).
4143 """
4176 """
4144 import code
4177 import code
4145
4178
4146 imported_objects = {
4179 imported_objects = {
4147 'ui': ui,
4180 'ui': ui,
4148 'repo': repo,
4181 'repo': repo,
4149 }
4182 }
4150
4183
4151 code.interact(local=imported_objects)
4184 code.interact(local=imported_objects)
4152
4185
4153
4186
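# Hypothetical sketch (not part of this module): code.interact() starts a REPL
# whose namespace is whatever mapping is passed as `local`, which is all
# debugshell does with `ui` and `repo`.  A minimal standalone equivalent:
def _interactive_shell(**namespace):
    import code

    banner = 'objects available: %s' % ', '.join(sorted(namespace))
    code.interact(banner=banner, local=namespace)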
4154 @command(
4187 @command(
4155 b'debugsuccessorssets',
4188 b'debugsuccessorssets',
4156 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4189 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4157 _(b'[REV]'),
4190 _(b'[REV]'),
4158 )
4191 )
4159 def debugsuccessorssets(ui, repo, *revs, **opts):
4192 def debugsuccessorssets(ui, repo, *revs, **opts):
4160 """show set of successors for revision
4193 """show set of successors for revision
4161
4194
4162 A successors set of changeset A is a consistent group of revisions that
4195 A successors set of changeset A is a consistent group of revisions that
4163 succeed A. It contains non-obsolete changesets only, unless the --closest
4196 succeed A. It contains non-obsolete changesets only, unless the --closest
4164 option is set.
4197 option is set.
4165
4198
4166 In most cases a changeset A has a single successors set containing a single
4199 In most cases a changeset A has a single successors set containing a single
4167 successor (changeset A replaced by A').
4200 successor (changeset A replaced by A').
4168
4201
4169 A changeset that is made obsolete with no successors is called "pruned".
4202 A changeset that is made obsolete with no successors is called "pruned".
4170 Such changesets have no successors sets at all.
4203 Such changesets have no successors sets at all.
4171
4204
4172 A changeset that has been "split" will have a successors set containing
4205 A changeset that has been "split" will have a successors set containing
4173 more than one successor.
4206 more than one successor.
4174
4207
4175 A changeset that has been rewritten in multiple different ways is called
4208 A changeset that has been rewritten in multiple different ways is called
4176 "divergent". Such changesets have multiple successor sets (each of which
4209 "divergent". Such changesets have multiple successor sets (each of which
4177 may also be split, i.e. have multiple successors).
4210 may also be split, i.e. have multiple successors).
4178
4211
4179 Results are displayed as follows::
4212 Results are displayed as follows::
4180
4213
4181 <rev1>
4214 <rev1>
4182 <successors-1A>
4215 <successors-1A>
4183 <rev2>
4216 <rev2>
4184 <successors-2A>
4217 <successors-2A>
4185 <successors-2B1> <successors-2B2> <successors-2B3>
4218 <successors-2B1> <successors-2B2> <successors-2B3>
4186
4219
4187 Here rev2 has two possible (i.e. divergent) successors sets. The first
4220 Here rev2 has two possible (i.e. divergent) successors sets. The first
4188 holds one element, whereas the second holds three (i.e. the changeset has
4221 holds one element, whereas the second holds three (i.e. the changeset has
4189 been split).
4222 been split).
4190 """
4223 """
4191 # passed to successorssets caching computation from one call to another
4224 # passed to successorssets caching computation from one call to another
4192 cache = {}
4225 cache = {}
4193 ctx2str = bytes
4226 ctx2str = bytes
4194 node2str = short
4227 node2str = short
4195 for rev in logcmdutil.revrange(repo, revs):
4228 for rev in logcmdutil.revrange(repo, revs):
4196 ctx = repo[rev]
4229 ctx = repo[rev]
4197 ui.write(b'%s\n' % ctx2str(ctx))
4230 ui.write(b'%s\n' % ctx2str(ctx))
4198 for succsset in obsutil.successorssets(
4231 for succsset in obsutil.successorssets(
4199 repo, ctx.node(), closest=opts['closest'], cache=cache
4232 repo, ctx.node(), closest=opts['closest'], cache=cache
4200 ):
4233 ):
4201 if succsset:
4234 if succsset:
4202 ui.write(b' ')
4235 ui.write(b' ')
4203 ui.write(node2str(succsset[0]))
4236 ui.write(node2str(succsset[0]))
4204 for node in succsset[1:]:
4237 for node in succsset[1:]:
4205 ui.write(b' ')
4238 ui.write(b' ')
4206 ui.write(node2str(node))
4239 ui.write(node2str(node))
4207 ui.write(b'\n')
4240 ui.write(b'\n')
4208
4241
4209
4242
4210 @command(b'debugtagscache', [])
4243 @command(b'debugtagscache', [])
4211 def debugtagscache(ui, repo):
4244 def debugtagscache(ui, repo):
4212 """display the contents of .hg/cache/hgtagsfnodes1"""
4245 """display the contents of .hg/cache/hgtagsfnodes1"""
4213 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4246 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4214 flog = repo.file(b'.hgtags')
4247 flog = repo.file(b'.hgtags')
4215 for r in repo:
4248 for r in repo:
4216 node = repo[r].node()
4249 node = repo[r].node()
4217 tagsnode = cache.getfnode(node, computemissing=False)
4250 tagsnode = cache.getfnode(node, computemissing=False)
4218 if tagsnode:
4251 if tagsnode:
4219 tagsnodedisplay = hex(tagsnode)
4252 tagsnodedisplay = hex(tagsnode)
4220 if not flog.hasnode(tagsnode):
4253 if not flog.hasnode(tagsnode):
4221 tagsnodedisplay += b' (unknown node)'
4254 tagsnodedisplay += b' (unknown node)'
4222 elif tagsnode is None:
4255 elif tagsnode is None:
4223 tagsnodedisplay = b'missing'
4256 tagsnodedisplay = b'missing'
4224 else:
4257 else:
4225 tagsnodedisplay = b'invalid'
4258 tagsnodedisplay = b'invalid'
4226
4259
4227 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4260 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4228
4261
4229
4262
4230 @command(
4263 @command(
4231 b'debugtemplate',
4264 b'debugtemplate',
4232 [
4265 [
4233 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4266 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4234 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4267 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4235 ],
4268 ],
4236 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4269 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4237 optionalrepo=True,
4270 optionalrepo=True,
4238 )
4271 )
4239 def debugtemplate(ui, repo, tmpl, **opts):
4272 def debugtemplate(ui, repo, tmpl, **opts):
4240 """parse and apply a template
4273 """parse and apply a template
4241
4274
4242 If -r/--rev is given, the template is processed as a log template and
4275 If -r/--rev is given, the template is processed as a log template and
4243 applied to the given changesets. Otherwise, it is processed as a generic
4276 applied to the given changesets. Otherwise, it is processed as a generic
4244 template.
4277 template.
4245
4278
4246 Use --verbose to print the parsed tree.
4279 Use --verbose to print the parsed tree.
4247 """
4280 """
4248 revs = None
4281 revs = None
4249 if opts['rev']:
4282 if opts['rev']:
4250 if repo is None:
4283 if repo is None:
4251 raise error.RepoError(
4284 raise error.RepoError(
4252 _(b'there is no Mercurial repository here (.hg not found)')
4285 _(b'there is no Mercurial repository here (.hg not found)')
4253 )
4286 )
4254 revs = logcmdutil.revrange(repo, opts['rev'])
4287 revs = logcmdutil.revrange(repo, opts['rev'])
4255
4288
4256 props = {}
4289 props = {}
4257 for d in opts['define']:
4290 for d in opts['define']:
4258 try:
4291 try:
4259 k, v = (e.strip() for e in d.split(b'=', 1))
4292 k, v = (e.strip() for e in d.split(b'=', 1))
4260 if not k or k == b'ui':
4293 if not k or k == b'ui':
4261 raise ValueError
4294 raise ValueError
4262 props[k] = v
4295 props[k] = v
4263 except ValueError:
4296 except ValueError:
4264 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4297 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4265
4298
4266 if ui.verbose:
4299 if ui.verbose:
4267 aliases = ui.configitems(b'templatealias')
4300 aliases = ui.configitems(b'templatealias')
4268 tree = templater.parse(tmpl)
4301 tree = templater.parse(tmpl)
4269 ui.note(templater.prettyformat(tree), b'\n')
4302 ui.note(templater.prettyformat(tree), b'\n')
4270 newtree = templater.expandaliases(tree, aliases)
4303 newtree = templater.expandaliases(tree, aliases)
4271 if newtree != tree:
4304 if newtree != tree:
4272 ui.notenoi18n(
4305 ui.notenoi18n(
4273 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4306 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4274 )
4307 )
4275
4308
4276 if revs is None:
4309 if revs is None:
4277 tres = formatter.templateresources(ui, repo)
4310 tres = formatter.templateresources(ui, repo)
4278 t = formatter.maketemplater(ui, tmpl, resources=tres)
4311 t = formatter.maketemplater(ui, tmpl, resources=tres)
4279 if ui.verbose:
4312 if ui.verbose:
4280 kwds, funcs = t.symbolsuseddefault()
4313 kwds, funcs = t.symbolsuseddefault()
4281 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4314 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4282 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4315 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4283 ui.write(t.renderdefault(props))
4316 ui.write(t.renderdefault(props))
4284 else:
4317 else:
4285 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4318 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4286 if ui.verbose:
4319 if ui.verbose:
4287 kwds, funcs = displayer.t.symbolsuseddefault()
4320 kwds, funcs = displayer.t.symbolsuseddefault()
4288 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4321 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4289 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4322 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4290 for r in revs:
4323 for r in revs:
4291 displayer.show(repo[r], **pycompat.strkwargs(props))
4324 displayer.show(repo[r], **pycompat.strkwargs(props))
4292 displayer.close()
4325 displayer.close()
4293
4326
4294
4327
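# Hypothetical usage sketch (not part of this module): two invocations of the
# command above, assuming `hg` is on PATH and we run inside a repository.
# With -r the string is treated as a log template; -D injects extra keywords.
def _debugtemplate_examples():
    import subprocess

    # render a log template for the working directory parent
    subprocess.run(
        ['hg', 'debugtemplate', '-r', '.', '{node|short} {desc|firstline}\n'],
        check=True,
    )
    # render a generic template with a user-defined keyword
    subprocess.run(
        ['hg', 'debugtemplate', '-D', 'greeting=hello', '{greeting} world\n'],
        check=True,
    )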
4295 @command(
4328 @command(
4296 b'debuguigetpass',
4329 b'debuguigetpass',
4297 [
4330 [
4298 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4331 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4299 ],
4332 ],
4300 _(b'[-p TEXT]'),
4333 _(b'[-p TEXT]'),
4301 norepo=True,
4334 norepo=True,
4302 )
4335 )
4303 def debuguigetpass(ui, prompt=b''):
4336 def debuguigetpass(ui, prompt=b''):
4304 """show prompt to type password"""
4337 """show prompt to type password"""
4305 r = ui.getpass(prompt)
4338 r = ui.getpass(prompt)
4306 if r is None:
4339 if r is None:
4307 r = b"<default response>"
4340 r = b"<default response>"
4308 ui.writenoi18n(b'response: %s\n' % r)
4341 ui.writenoi18n(b'response: %s\n' % r)
4309
4342
4310
4343
4311 @command(
4344 @command(
4312 b'debuguiprompt',
4345 b'debuguiprompt',
4313 [
4346 [
4314 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4347 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4315 ],
4348 ],
4316 _(b'[-p TEXT]'),
4349 _(b'[-p TEXT]'),
4317 norepo=True,
4350 norepo=True,
4318 )
4351 )
4319 def debuguiprompt(ui, prompt=b''):
4352 def debuguiprompt(ui, prompt=b''):
4320 """show plain prompt"""
4353 """show plain prompt"""
4321 r = ui.prompt(prompt)
4354 r = ui.prompt(prompt)
4322 ui.writenoi18n(b'response: %s\n' % r)
4355 ui.writenoi18n(b'response: %s\n' % r)
4323
4356
4324
4357
4325 @command(b'debugupdatecaches', [])
4358 @command(b'debugupdatecaches', [])
4326 def debugupdatecaches(ui, repo, *pats, **opts):
4359 def debugupdatecaches(ui, repo, *pats, **opts):
4327 """warm all known caches in the repository"""
4360 """warm all known caches in the repository"""
4328 with repo.wlock(), repo.lock():
4361 with repo.wlock(), repo.lock():
4329 repo.updatecaches(caches=repository.CACHES_ALL)
4362 repo.updatecaches(caches=repository.CACHES_ALL)
4330
4363
4331
4364
4332 @command(
4365 @command(
4333 b'debugupgraderepo',
4366 b'debugupgraderepo',
4334 [
4367 [
4335 (
4368 (
4336 b'o',
4369 b'o',
4337 b'optimize',
4370 b'optimize',
4338 [],
4371 [],
4339 _(b'extra optimization to perform'),
4372 _(b'extra optimization to perform'),
4340 _(b'NAME'),
4373 _(b'NAME'),
4341 ),
4374 ),
4342 (b'', b'run', False, _(b'performs an upgrade')),
4375 (b'', b'run', False, _(b'performs an upgrade')),
4343 (b'', b'backup', True, _(b'keep the old repository content around')),
4376 (b'', b'backup', True, _(b'keep the old repository content around')),
4344 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4377 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4345 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4378 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4346 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4379 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4347 ],
4380 ],
4348 )
4381 )
4349 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4382 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4350 """upgrade a repository to use different features
4383 """upgrade a repository to use different features
4351
4384
4352 If no arguments are specified, the repository is evaluated for upgrade
4385 If no arguments are specified, the repository is evaluated for upgrade
4353 and a list of problems and potential optimizations is printed.
4386 and a list of problems and potential optimizations is printed.
4354
4387
4355 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4388 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4356 can be influenced via additional arguments. More details will be provided
4389 can be influenced via additional arguments. More details will be provided
4357 by the command output when run without ``--run``.
4390 by the command output when run without ``--run``.
4358
4391
4359 During the upgrade, the repository will be locked and no writes will be
4392 During the upgrade, the repository will be locked and no writes will be
4360 allowed.
4393 allowed.
4361
4394
4362 At the end of the upgrade, the repository may not be readable while new
4395 At the end of the upgrade, the repository may not be readable while new
4363 repository data is swapped in. This window will be as long as it takes to
4396 repository data is swapped in. This window will be as long as it takes to
4364 rename some directories inside the ``.hg`` directory. On most machines, this
4397 rename some directories inside the ``.hg`` directory. On most machines, this
4365 should complete almost instantaneously and the chances of a consumer being
4398 should complete almost instantaneously and the chances of a consumer being
4366 unable to access the repository should be low.
4399 unable to access the repository should be low.
4367
4400
4368 By default, all revlogs will be upgraded. You can restrict this using flags
4401 By default, all revlogs will be upgraded. You can restrict this using flags
4369 such as `--manifest`:
4402 such as `--manifest`:
4370
4403
4371 * `--manifest`: only optimize the manifest
4404 * `--manifest`: only optimize the manifest
4372 * `--no-manifest`: optimize all revlogs but the manifest
4405 * `--no-manifest`: optimize all revlogs but the manifest
4373 * `--changelog`: optimize the changelog only
4406 * `--changelog`: optimize the changelog only
4374 * `--no-changelog --no-manifest`: optimize filelogs only
4407 * `--no-changelog --no-manifest`: optimize filelogs only
4375 * `--filelogs`: optimize the filelogs only
4408 * `--filelogs`: optimize the filelogs only
4376 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4409 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4377 """
4410 """
4378 return upgrade.upgraderepo(
4411 return upgrade.upgraderepo(
4379 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4412 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4380 )
4413 )
4381
4414
4382
4415
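# Hypothetical usage sketch (not part of this module): the revlog-selection
# flags listed above in practice, assuming `hg` is on PATH and we run inside
# a repository.  Without --run the command only reports what it would do.
def _upgrade_examples():
    import subprocess

    # dry run: list problems and possible optimizations
    subprocess.run(['hg', 'debugupgraderepo'], check=True)
    # actually upgrade, but only touch the filelogs
    subprocess.run(
        ['hg', 'debugupgraderepo', '--run', '--no-changelog', '--no-manifest'],
        check=True,
    )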
4383 @command(
4416 @command(
4384 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4417 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4385 )
4418 )
4386 def debugwalk(ui, repo, *pats, **opts):
4419 def debugwalk(ui, repo, *pats, **opts):
4387 """show how files match on given patterns"""
4420 """show how files match on given patterns"""
4388 opts = pycompat.byteskwargs(opts)
4421 opts = pycompat.byteskwargs(opts)
4389 m = scmutil.match(repo[None], pats, opts)
4422 m = scmutil.match(repo[None], pats, opts)
4390 if ui.verbose:
4423 if ui.verbose:
4391 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4424 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4392 items = list(repo[None].walk(m))
4425 items = list(repo[None].walk(m))
4393 if not items:
4426 if not items:
4394 return
4427 return
4395 f = lambda fn: fn
4428 f = lambda fn: fn
4396 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4429 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4397 f = lambda fn: util.normpath(fn)
4430 f = lambda fn: util.normpath(fn)
4398 fmt = b'f %%-%ds %%-%ds %%s' % (
4431 fmt = b'f %%-%ds %%-%ds %%s' % (
4399 max([len(abs) for abs in items]),
4432 max([len(abs) for abs in items]),
4400 max([len(repo.pathto(abs)) for abs in items]),
4433 max([len(repo.pathto(abs)) for abs in items]),
4401 )
4434 )
4402 for abs in items:
4435 for abs in items:
4403 line = fmt % (
4436 line = fmt % (
4404 abs,
4437 abs,
4405 f(repo.pathto(abs)),
4438 f(repo.pathto(abs)),
4406 m.exact(abs) and b'exact' or b'',
4439 m.exact(abs) and b'exact' or b'',
4407 )
4440 )
4408 ui.write(b"%s\n" % line.rstrip())
4441 ui.write(b"%s\n" % line.rstrip())
4409
4442
4410
4443
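# Hypothetical sketch (not part of this module): the dynamic `fmt` string
# above is a two-pass column layout -- measure the widest value per column,
# then build a format string with those widths baked in:
def _columnize(rows):
    widths = [max(len(cell) for cell in col) for col in zip(*rows)]
    fmt = '  '.join('%%-%ds' % w for w in widths)
    return [fmt % row for row in rows]

# _columnize([('a', 'alpha'), ('bb', 'b')]) returns
# ['a   alpha', 'bb  b    '] (padded columns, rstripped by debugwalk above)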
4411 @command(b'debugwhyunstable', [], _(b'REV'))
4444 @command(b'debugwhyunstable', [], _(b'REV'))
4412 def debugwhyunstable(ui, repo, rev):
4445 def debugwhyunstable(ui, repo, rev):
4413 """explain instabilities of a changeset"""
4446 """explain instabilities of a changeset"""
4414 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4447 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4415 dnodes = b''
4448 dnodes = b''
4416 if entry.get(b'divergentnodes'):
4449 if entry.get(b'divergentnodes'):
4417 dnodes = (
4450 dnodes = (
4418 b' '.join(
4451 b' '.join(
4419 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4452 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4420 for ctx in entry[b'divergentnodes']
4453 for ctx in entry[b'divergentnodes']
4421 )
4454 )
4422 + b' '
4455 + b' '
4423 )
4456 )
4424 ui.write(
4457 ui.write(
4425 b'%s: %s%s %s\n'
4458 b'%s: %s%s %s\n'
4426 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4459 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4427 )
4460 )
4428
4461
4429
4462
4430 @command(
4463 @command(
4431 b'debugwireargs',
4464 b'debugwireargs',
4432 [
4465 [
4433 (b'', b'three', b'', b'three'),
4466 (b'', b'three', b'', b'three'),
4434 (b'', b'four', b'', b'four'),
4467 (b'', b'four', b'', b'four'),
4435 (b'', b'five', b'', b'five'),
4468 (b'', b'five', b'', b'five'),
4436 ]
4469 ]
4437 + cmdutil.remoteopts,
4470 + cmdutil.remoteopts,
4438 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4471 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4439 norepo=True,
4472 norepo=True,
4440 )
4473 )
4441 def debugwireargs(ui, repopath, *vals, **opts):
4474 def debugwireargs(ui, repopath, *vals, **opts):
4442 opts = pycompat.byteskwargs(opts)
4475 opts = pycompat.byteskwargs(opts)
4443 repo = hg.peer(ui, opts, repopath)
4476 repo = hg.peer(ui, opts, repopath)
4444 try:
4477 try:
4445 for opt in cmdutil.remoteopts:
4478 for opt in cmdutil.remoteopts:
4446 del opts[opt[1]]
4479 del opts[opt[1]]
4447 args = {}
4480 args = {}
4448 for k, v in opts.items():
4481 for k, v in opts.items():
4449 if v:
4482 if v:
4450 args[k] = v
4483 args[k] = v
4451 args = pycompat.strkwargs(args)
4484 args = pycompat.strkwargs(args)
4452 # run twice to check that we don't mess up the stream for the next command
4485 # run twice to check that we don't mess up the stream for the next command
4453 res1 = repo.debugwireargs(*vals, **args)
4486 res1 = repo.debugwireargs(*vals, **args)
4454 res2 = repo.debugwireargs(*vals, **args)
4487 res2 = repo.debugwireargs(*vals, **args)
4455 ui.write(b"%s\n" % res1)
4488 ui.write(b"%s\n" % res1)
4456 if res1 != res2:
4489 if res1 != res2:
4457 ui.warn(b"%s\n" % res2)
4490 ui.warn(b"%s\n" % res2)
4458 finally:
4491 finally:
4459 repo.close()
4492 repo.close()
4460
4493
4461
4494
4462 def _parsewirelangblocks(fh):
4495 def _parsewirelangblocks(fh):
4463 activeaction = None
4496 activeaction = None
4464 blocklines = []
4497 blocklines = []
4465 lastindent = 0
4498 lastindent = 0
4466
4499
4467 for line in fh:
4500 for line in fh:
4468 line = line.rstrip()
4501 line = line.rstrip()
4469 if not line:
4502 if not line:
4470 continue
4503 continue
4471
4504
4472 if line.startswith(b'#'):
4505 if line.startswith(b'#'):
4473 continue
4506 continue
4474
4507
4475 if not line.startswith(b' '):
4508 if not line.startswith(b' '):
4476 # New block. Flush previous one.
4509 # New block. Flush previous one.
4477 if activeaction:
4510 if activeaction:
4478 yield activeaction, blocklines
4511 yield activeaction, blocklines
4479
4512
4480 activeaction = line
4513 activeaction = line
4481 blocklines = []
4514 blocklines = []
4482 lastindent = 0
4515 lastindent = 0
4483 continue
4516 continue
4484
4517
4485 # Else we start with an indent.
4518 # Else we start with an indent.
4486
4519
4487 if not activeaction:
4520 if not activeaction:
4488 raise error.Abort(_(b'indented line outside of block'))
4521 raise error.Abort(_(b'indented line outside of block'))
4489
4522
4490 indent = len(line) - len(line.lstrip())
4523 indent = len(line) - len(line.lstrip())
4491
4524
4492 # If this line is indented more than the last line, concatenate it.
4525 # If this line is indented more than the last line, concatenate it.
4493 if indent > lastindent and blocklines:
4526 if indent > lastindent and blocklines:
4494 blocklines[-1] += line.lstrip()
4527 blocklines[-1] += line.lstrip()
4495 else:
4528 else:
4496 blocklines.append(line)
4529 blocklines.append(line)
4497 lastindent = indent
4530 lastindent = indent
4498
4531
4499 # Flush last block.
4532 # Flush last block.
4500 if activeaction:
4533 if activeaction:
4501 yield activeaction, blocklines
4534 yield activeaction, blocklines
4502
4535
4503
4536
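# Hypothetical sketch (not part of this module): feeding the parser a small
# script shows how indentation groups argument lines under an action.  Any
# file-like object yielding bytes lines works:
def _parse_example():
    import io

    script = io.BytesIO(
        b'# comment lines are skipped\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'readavailable\n'
    )
    return list(_parsewirelangblocks(script))

# expected result:
#   [(b'command listkeys', [b'    namespace bookmarks']),
#    (b'readavailable', [])]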
4504 @command(
4537 @command(
4505 b'debugwireproto',
4538 b'debugwireproto',
4506 [
4539 [
4507 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4540 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4508 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4541 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4509 (
4542 (
4510 b'',
4543 b'',
4511 b'noreadstderr',
4544 b'noreadstderr',
4512 False,
4545 False,
4513 _(b'do not read from stderr of the remote'),
4546 _(b'do not read from stderr of the remote'),
4514 ),
4547 ),
4515 (
4548 (
4516 b'',
4549 b'',
4517 b'nologhandshake',
4550 b'nologhandshake',
4518 False,
4551 False,
4519 _(b'do not log I/O related to the peer handshake'),
4552 _(b'do not log I/O related to the peer handshake'),
4520 ),
4553 ),
4521 ]
4554 ]
4522 + cmdutil.remoteopts,
4555 + cmdutil.remoteopts,
4523 _(b'[PATH]'),
4556 _(b'[PATH]'),
4524 optionalrepo=True,
4557 optionalrepo=True,
4525 )
4558 )
4526 def debugwireproto(ui, repo, path=None, **opts):
4559 def debugwireproto(ui, repo, path=None, **opts):
4527 """send wire protocol commands to a server
4560 """send wire protocol commands to a server
4528
4561
4529 This command can be used to issue wire protocol commands to remote
4562 This command can be used to issue wire protocol commands to remote
4530 peers and to debug the raw data being exchanged.
4563 peers and to debug the raw data being exchanged.
4531
4564
4532 ``--localssh`` will start an SSH server against the current repository
4565 ``--localssh`` will start an SSH server against the current repository
4533 and connect to that. By default, the connection will perform a handshake
4566 and connect to that. By default, the connection will perform a handshake
4534 and establish an appropriate peer instance.
4567 and establish an appropriate peer instance.
4535
4568
4536 ``--peer`` can be used to bypass the handshake protocol and construct a
4569 ``--peer`` can be used to bypass the handshake protocol and construct a
4537 peer instance using the specified class type. Valid values are ``raw``,
4570 peer instance using the specified class type. Valid values are ``raw``,
4538 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4571 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4539 don't support higher-level command actions.
4572 don't support higher-level command actions.
4540
4573
4541 ``--noreadstderr`` can be used to disable automatic reading from stderr
4574 ``--noreadstderr`` can be used to disable automatic reading from stderr
4542 of the peer (for SSH connections only). Disabling automatic reading of
4575 of the peer (for SSH connections only). Disabling automatic reading of
4543 stderr is useful for making output more deterministic.
4576 stderr is useful for making output more deterministic.
4544
4577
4545 Commands are issued via a mini language which is specified via stdin.
4578 Commands are issued via a mini language which is specified via stdin.
4546 The language consists of individual actions to perform. An action is
4579 The language consists of individual actions to perform. An action is
4547 defined by a block. A block is defined as a line with no leading
4580 defined by a block. A block is defined as a line with no leading
4548 space followed by 0 or more lines with leading space. Blocks are
4581 space followed by 0 or more lines with leading space. Blocks are
4549 effectively a high-level command with additional metadata.
4582 effectively a high-level command with additional metadata.
4550
4583
4551 Lines beginning with ``#`` are ignored.
4584 Lines beginning with ``#`` are ignored.
4552
4585
4553 The following sections denote available actions.
4586 The following sections denote available actions.
4554
4587
4555 raw
4588 raw
4556 ---
4589 ---
4557
4590
4558 Send raw data to the server.
4591 Send raw data to the server.
4559
4592
4560 The block payload contains the raw data to send as one atomic send
4593 The block payload contains the raw data to send as one atomic send
4561 operation. The data may not actually be delivered in a single system
4594 operation. The data may not actually be delivered in a single system
4562 call: it depends on the abilities of the transport being used.
4595 call: it depends on the abilities of the transport being used.
4563
4596
4564 Each line in the block is de-indented and concatenated. Then, that
4597 Each line in the block is de-indented and concatenated. Then, that
4565 value is evaluated as a Python b'' literal. This allows the use of
4598 value is evaluated as a Python b'' literal. This allows the use of
4566 backslash escaping, etc.
4599 backslash escaping, etc.
4567
4600
4568 raw+
4601 raw+
4569 ----
4602 ----
4570
4603
4571 Behaves like ``raw`` except flushes output afterwards.
4604 Behaves like ``raw`` except flushes output afterwards.
4572
4605
4573 command <X>
4606 command <X>
4574 -----------
4607 -----------
4575
4608
4576 Send a request to run a named command, whose name follows the ``command``
4609 Send a request to run a named command, whose name follows the ``command``
4577 string.
4610 string.
4578
4611
4579 Arguments to the command are defined as lines in this block. The format of
4612 Arguments to the command are defined as lines in this block. The format of
4580 each line is ``<key> <value>``. e.g.::
4613 each line is ``<key> <value>``. e.g.::
4581
4614
4582 command listkeys
4615 command listkeys
4583 namespace bookmarks
4616 namespace bookmarks
4584
4617
4585 If the value begins with ``eval:``, it will be interpreted as a Python
4618 If the value begins with ``eval:``, it will be interpreted as a Python
4586 literal expression. Otherwise values are interpreted as Python b'' literals.
4619 literal expression. Otherwise values are interpreted as Python b'' literals.
4587 This allows sending complex types and encoding special byte sequences via
4620 This allows sending complex types and encoding special byte sequences via
4588 backslash escaping.
4621 backslash escaping.
4589
4622
4590 The following arguments have special meaning:
4623 The following arguments have special meaning:
4591
4624
4592 ``PUSHFILE``
4625 ``PUSHFILE``
4593 When defined, the *push* mechanism of the peer will be used instead
4626 When defined, the *push* mechanism of the peer will be used instead
4594 of the static request-response mechanism and the content of the
4627 of the static request-response mechanism and the content of the
4595 file specified in the value of this argument will be sent as the
4628 file specified in the value of this argument will be sent as the
4596 command payload.
4629 command payload.
4597
4630
4598 This can be used to submit a local bundle file to the remote.
4631 This can be used to submit a local bundle file to the remote.
4599
4632
4600 batchbegin
4633 batchbegin
4601 ----------
4634 ----------
4602
4635
4603 Instruct the peer to begin a batched send.
4636 Instruct the peer to begin a batched send.
4604
4637
4605 All ``command`` blocks are queued for execution until the next
4638 All ``command`` blocks are queued for execution until the next
4606 ``batchsubmit`` block.
4639 ``batchsubmit`` block.
4607
4640
4608 batchsubmit
4641 batchsubmit
4609 -----------
4642 -----------
4610
4643
4611 Submit previously queued ``command`` blocks as a batch request.
4644 Submit previously queued ``command`` blocks as a batch request.
4612
4645
4613 This action MUST be paired with a ``batchbegin`` action.
4646 This action MUST be paired with a ``batchbegin`` action.
4614
4647
4615 httprequest <method> <path>
4648 httprequest <method> <path>
4616 ---------------------------
4649 ---------------------------
4617
4650
4618 (HTTP peer only)
4651 (HTTP peer only)
4619
4652
4620 Send an HTTP request to the peer.
4653 Send an HTTP request to the peer.
4621
4654
4622 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4655 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4623
4656
4624 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4657 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4625 headers to add to the request. e.g. ``Accept: foo``.
4658 headers to add to the request. e.g. ``Accept: foo``.
4626
4659
4627 The following arguments are special:
4660 The following arguments are special:
4628
4661
4629 ``BODYFILE``
4662 ``BODYFILE``
4630 The content of the file defined as the value to this argument will be
4663 The content of the file defined as the value to this argument will be
4631 transferred verbatim as the HTTP request body.
4664 transferred verbatim as the HTTP request body.
4632
4665
4633 ``frame <type> <flags> <payload>``
4666 ``frame <type> <flags> <payload>``
4634 Send a unified protocol frame as part of the request body.
4667 Send a unified protocol frame as part of the request body.
4635
4668
4636 All frames will be collected and sent as the body to the HTTP
4669 All frames will be collected and sent as the body to the HTTP
4637 request.
4670 request.
4638
4671
4639 close
4672 close
4640 -----
4673 -----
4641
4674
4642 Close the connection to the server.
4675 Close the connection to the server.
4643
4676
4644 flush
4677 flush
4645 -----
4678 -----
4646
4679
4647 Flush data written to the server.
4680 Flush data written to the server.
4648
4681
4649 readavailable
4682 readavailable
4650 -------------
4683 -------------
4651
4684
4652 Close the write end of the connection and read all available data from
4685 Close the write end of the connection and read all available data from
4653 the server.
4686 the server.
4654
4687
4655 If the connection to the server encompasses multiple pipes, we poll both
4688 If the connection to the server encompasses multiple pipes, we poll both
4656 pipes and read available data.
4689 pipes and read available data.
4657
4690
4658 readline
4691 readline
4659 --------
4692 --------
4660
4693
4661 Read a line of output from the server. If there are multiple output
4694 Read a line of output from the server. If there are multiple output
4662 pipes, reads only the main pipe.
4695 pipes, reads only the main pipe.
4663
4696
4664 ereadline
4697 ereadline
4665 ---------
4698 ---------
4666
4699
4667 Like ``readline``, but read from the stderr pipe, if available.
4700 Like ``readline``, but read from the stderr pipe, if available.
4668
4701
4669 read <X>
4702 read <X>
4670 --------
4703 --------
4671
4704
4672 ``read()`` N bytes from the server's main output pipe.
4705 ``read()`` N bytes from the server's main output pipe.
4673
4706
4674 eread <X>
4707 eread <X>
4675 ---------
4708 ---------
4676
4709
4677 ``read()`` N bytes from the server's stderr pipe, if available.
4710 ``read()`` N bytes from the server's stderr pipe, if available.
4678
4711
4679 Specifying Unified Frame-Based Protocol Frames
4712 Specifying Unified Frame-Based Protocol Frames
4680 ----------------------------------------------
4713 ----------------------------------------------
4681
4714
4682 It is possible to emit *Unified Frame-Based Protocol* frames by using
4715 It is possible to emit *Unified Frame-Based Protocol* frames by using
4683 special syntax.
4716 special syntax.
4684
4717
4685 A frame is composed of a type, flags, and a payload. These can be parsed
4718 A frame is composed of a type, flags, and a payload. These can be parsed
4686 from a string of the form:
4719 from a string of the form:
4687
4720
4688 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4721 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4689
4722
4690 ``request-id`` and ``stream-id`` are integers defining the request and
4723 ``request-id`` and ``stream-id`` are integers defining the request and
4691 stream identifiers.
4724 stream identifiers.
4692
4725
4693 ``type`` can be an integer value for the frame type or the string name
4726 ``type`` can be an integer value for the frame type or the string name
4694 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4727 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4695 ``command-name``.
4728 ``command-name``.
4696
4729
4697 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4730 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4698 components. Each component (and there can be just one) can be an integer
4731 components. Each component (and there can be just one) can be an integer
4699 or a flag name for stream flags or frame flags, respectively. Values are
4732 or a flag name for stream flags or frame flags, respectively. Values are
4700 resolved to integers and then bitwise OR'd together.
4733 resolved to integers and then bitwise OR'd together.
4701
4734
4702 ``payload`` represents the raw frame payload. If it begins with
4735 ``payload`` represents the raw frame payload. If it begins with
4703 ``cbor:``, the following string is evaluated as Python code and the
4736 ``cbor:``, the following string is evaluated as Python code and the
4704 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4737 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4705 as a Python byte string literal.
4738 as a Python byte string literal.
4706 """
4739 """
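    # Hypothetical usage sketch (not part of this function): a script in the
    # mini language above could be fed on stdin from another process, assuming
    # `hg` is on PATH and the current directory is a repository:
    #
    #     import subprocess
    #     script = b'command listkeys\n    namespace bookmarks\n'
    #     subprocess.run(
    #         ['hg', 'debugwireproto', '--localssh'], input=script, check=True
    #     )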
4707 opts = pycompat.byteskwargs(opts)
4740 opts = pycompat.byteskwargs(opts)
4708
4741
4709 if opts[b'localssh'] and not repo:
4742 if opts[b'localssh'] and not repo:
4710 raise error.Abort(_(b'--localssh requires a repository'))
4743 raise error.Abort(_(b'--localssh requires a repository'))
4711
4744
4712 if opts[b'peer'] and opts[b'peer'] not in (
4745 if opts[b'peer'] and opts[b'peer'] not in (
4713 b'raw',
4746 b'raw',
4714 b'ssh1',
4747 b'ssh1',
4715 ):
4748 ):
4716 raise error.Abort(
4749 raise error.Abort(
4717 _(b'invalid value for --peer'),
4750 _(b'invalid value for --peer'),
4718 hint=_(b'valid values are "raw" and "ssh1"'),
4751 hint=_(b'valid values are "raw" and "ssh1"'),
4719 )
4752 )
4720
4753
4721 if path and opts[b'localssh']:
4754 if path and opts[b'localssh']:
4722 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4755 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4723
4756
4724 if ui.interactive():
4757 if ui.interactive():
4725 ui.write(_(b'(waiting for commands on stdin)\n'))
4758 ui.write(_(b'(waiting for commands on stdin)\n'))
4726
4759
4727 blocks = list(_parsewirelangblocks(ui.fin))
4760 blocks = list(_parsewirelangblocks(ui.fin))
4728
4761
4729 proc = None
4762 proc = None
4730 stdin = None
4763 stdin = None
4731 stdout = None
4764 stdout = None
4732 stderr = None
4765 stderr = None
4733 opener = None
4766 opener = None
4734
4767
4735 if opts[b'localssh']:
4768 if opts[b'localssh']:
4736 # We start the SSH server in its own process so there is process
4769 # We start the SSH server in its own process so there is process
4737 # separation. This prevents a whole class of potential bugs around
4770 # separation. This prevents a whole class of potential bugs around
4738 # shared state from interfering with server operation.
4771 # shared state from interfering with server operation.
4739 args = procutil.hgcmd() + [
4772 args = procutil.hgcmd() + [
4740 b'-R',
4773 b'-R',
4741 repo.root,
4774 repo.root,
4742 b'debugserve',
4775 b'debugserve',
4743 b'--sshstdio',
4776 b'--sshstdio',
4744 ]
4777 ]
4745 proc = subprocess.Popen(
4778 proc = subprocess.Popen(
4746 pycompat.rapply(procutil.tonativestr, args),
4779 pycompat.rapply(procutil.tonativestr, args),
4747 stdin=subprocess.PIPE,
4780 stdin=subprocess.PIPE,
4748 stdout=subprocess.PIPE,
4781 stdout=subprocess.PIPE,
4749 stderr=subprocess.PIPE,
4782 stderr=subprocess.PIPE,
4750 bufsize=0,
4783 bufsize=0,
4751 )
4784 )
4752
4785
4753 stdin = proc.stdin
4786 stdin = proc.stdin
4754 stdout = proc.stdout
4787 stdout = proc.stdout
4755 stderr = proc.stderr
4788 stderr = proc.stderr
4756
4789
4757 # We turn the pipes into observers so we can log I/O.
4790 # We turn the pipes into observers so we can log I/O.
4758 if ui.verbose or opts[b'peer'] == b'raw':
4791 if ui.verbose or opts[b'peer'] == b'raw':
4759 stdin = util.makeloggingfileobject(
4792 stdin = util.makeloggingfileobject(
4760 ui, proc.stdin, b'i', logdata=True
4793 ui, proc.stdin, b'i', logdata=True
4761 )
4794 )
4762 stdout = util.makeloggingfileobject(
4795 stdout = util.makeloggingfileobject(
4763 ui, proc.stdout, b'o', logdata=True
4796 ui, proc.stdout, b'o', logdata=True
4764 )
4797 )
4765 stderr = util.makeloggingfileobject(
4798 stderr = util.makeloggingfileobject(
4766 ui, proc.stderr, b'e', logdata=True
4799 ui, proc.stderr, b'e', logdata=True
4767 )
4800 )
4768
4801
4769 # --localssh also implies the peer connection settings.
4802 # --localssh also implies the peer connection settings.
4770
4803
4771 url = b'ssh://localserver'
4804 url = b'ssh://localserver'
4772 autoreadstderr = not opts[b'noreadstderr']
4805 autoreadstderr = not opts[b'noreadstderr']
4773
4806
4774 if opts[b'peer'] == b'ssh1':
4807 if opts[b'peer'] == b'ssh1':
4775 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4808 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4776 peer = sshpeer.sshv1peer(
4809 peer = sshpeer.sshv1peer(
4777 ui,
4810 ui,
4778 url,
4811 url,
4779 proc,
4812 proc,
4780 stdin,
4813 stdin,
4781 stdout,
4814 stdout,
4782 stderr,
4815 stderr,
4783 None,
4816 None,
4784 autoreadstderr=autoreadstderr,
4817 autoreadstderr=autoreadstderr,
4785 )
4818 )
4786 elif opts[b'peer'] == b'raw':
4819 elif opts[b'peer'] == b'raw':
4787 ui.write(_(b'using raw connection to peer\n'))
4820 ui.write(_(b'using raw connection to peer\n'))
4788 peer = None
4821 peer = None
4789 else:
4822 else:
4790 ui.write(_(b'creating ssh peer from handshake results\n'))
4823 ui.write(_(b'creating ssh peer from handshake results\n'))
4791 peer = sshpeer.makepeer(
4824 peer = sshpeer.makepeer(
4792 ui,
4825 ui,
4793 url,
4826 url,
4794 proc,
4827 proc,
4795 stdin,
4828 stdin,
4796 stdout,
4829 stdout,
4797 stderr,
4830 stderr,
4798 autoreadstderr=autoreadstderr,
4831 autoreadstderr=autoreadstderr,
4799 )
4832 )
4800
4833
4801 elif path:
4834 elif path:
4802 # We bypass hg.peer() so we can proxy the sockets.
4835 # We bypass hg.peer() so we can proxy the sockets.
4803 # TODO consider not doing this because we skip
4836 # TODO consider not doing this because we skip
4804 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4837 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4805 u = urlutil.url(path)
4838 u = urlutil.url(path)
4806 if u.scheme != b'http':
4839 if u.scheme != b'http':
4807 raise error.Abort(_(b'only http:// paths are currently supported'))
4840 raise error.Abort(_(b'only http:// paths are currently supported'))
4808
4841
4809 url, authinfo = u.authinfo()
4842 url, authinfo = u.authinfo()
4810 openerargs = {
4843 openerargs = {
4811 'useragent': b'Mercurial debugwireproto',
4844 'useragent': b'Mercurial debugwireproto',
4812 }
4845 }
4813
4846
4814 # Turn pipes/sockets into observers so we can log I/O.
4847 # Turn pipes/sockets into observers so we can log I/O.
4815 if ui.verbose:
4848 if ui.verbose:
4816 openerargs.update(
4849 openerargs.update(
4817 {
4850 {
4818 'loggingfh': ui,
4851 'loggingfh': ui,
4819 'loggingname': b's',
4852 'loggingname': b's',
4820 'loggingopts': {
4853 'loggingopts': {
4821 'logdata': True,
4854 'logdata': True,
4822 'logdataapis': False,
4855 'logdataapis': False,
4823 },
4856 },
4824 }
4857 }
4825 )
4858 )
4826
4859
4827 if ui.debugflag:
4860 if ui.debugflag:
4828 openerargs['loggingopts']['logdataapis'] = True
4861 openerargs['loggingopts']['logdataapis'] = True
4829
4862
4830 # Don't send default headers when in raw mode. This allows us to
4863 # Don't send default headers when in raw mode. This allows us to
4831 # bypass most of the behavior of our URL handling code so we can
4864 # bypass most of the behavior of our URL handling code so we can
4832 # have near complete control over what's sent on the wire.
4865 # have near complete control over what's sent on the wire.
4833 if opts[b'peer'] == b'raw':
4866 if opts[b'peer'] == b'raw':
4834 openerargs['sendaccept'] = False
4867 openerargs['sendaccept'] = False
4835
4868
4836 opener = urlmod.opener(ui, authinfo, **openerargs)
4869 opener = urlmod.opener(ui, authinfo, **openerargs)
4837
4870
4838 if opts[b'peer'] == b'raw':
4871 if opts[b'peer'] == b'raw':
4839 ui.write(_(b'using raw connection to peer\n'))
4872 ui.write(_(b'using raw connection to peer\n'))
4840 peer = None
4873 peer = None
4841 elif opts[b'peer']:
4874 elif opts[b'peer']:
4842 raise error.Abort(
4875 raise error.Abort(
4843 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4876 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4844 )
4877 )
4845 else:
4878 else:
4846 peer = httppeer.makepeer(ui, path, opener=opener)
4879 peer = httppeer.makepeer(ui, path, opener=opener)
4847
4880
4848 # We /could/ populate stdin/stdout with sock.makefile()...
4881 # We /could/ populate stdin/stdout with sock.makefile()...
4849 else:
4882 else:
4850 raise error.Abort(_(b'unsupported connection configuration'))
4883 raise error.Abort(_(b'unsupported connection configuration'))
4851
4884
4852 batchedcommands = None
4885 batchedcommands = None
4853
4886
4854 # Now perform actions based on the parsed wire language instructions.
4887 # Now perform actions based on the parsed wire language instructions.
4855 for action, lines in blocks:
4888 for action, lines in blocks:
4856 if action in (b'raw', b'raw+'):
4889 if action in (b'raw', b'raw+'):
4857 if not stdin:
4890 if not stdin:
4858 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4891 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4859
4892
4860 # Concatenate the data together.
4893 # Concatenate the data together.
4861 data = b''.join(l.lstrip() for l in lines)
4894 data = b''.join(l.lstrip() for l in lines)
4862 data = stringutil.unescapestr(data)
4895 data = stringutil.unescapestr(data)
4863 stdin.write(data)
4896 stdin.write(data)
4864
4897
4865 if action == b'raw+':
4898 if action == b'raw+':
4866 stdin.flush()
4899 stdin.flush()
4867 elif action == b'flush':
4900 elif action == b'flush':
4868 if not stdin:
4901 if not stdin:
4869 raise error.Abort(_(b'cannot call flush on this peer'))
4902 raise error.Abort(_(b'cannot call flush on this peer'))
4870 stdin.flush()
4903 stdin.flush()
4871 elif action.startswith(b'command'):
4904 elif action.startswith(b'command'):
4872 if not peer:
4905 if not peer:
4873 raise error.Abort(
4906 raise error.Abort(
4874 _(
4907 _(
4875 b'cannot send commands unless peer instance '
4908 b'cannot send commands unless peer instance '
4876 b'is available'
4909 b'is available'
4877 )
4910 )
4878 )
4911 )
4879
4912
4880 command = action.split(b' ', 1)[1]
4913 command = action.split(b' ', 1)[1]
4881
4914
4882 args = {}
4915 args = {}
4883 for line in lines:
4916 for line in lines:
4884 # We need to allow empty values.
4917 # We need to allow empty values.
4885 fields = line.lstrip().split(b' ', 1)
4918 fields = line.lstrip().split(b' ', 1)
4886 if len(fields) == 1:
4919 if len(fields) == 1:
4887 key = fields[0]
4920 key = fields[0]
4888 value = b''
4921 value = b''
4889 else:
4922 else:
4890 key, value = fields
4923 key, value = fields
4891
4924
4892 if value.startswith(b'eval:'):
4925 if value.startswith(b'eval:'):
4893 value = stringutil.evalpythonliteral(value[5:])
4926 value = stringutil.evalpythonliteral(value[5:])
4894 else:
4927 else:
4895 value = stringutil.unescapestr(value)
4928 value = stringutil.unescapestr(value)
4896
4929
4897 args[key] = value
4930 args[key] = value
4898
4931
4899 if batchedcommands is not None:
4932 if batchedcommands is not None:
4900 batchedcommands.append((command, args))
4933 batchedcommands.append((command, args))
4901 continue
4934 continue
4902
4935
4903 ui.status(_(b'sending %s command\n') % command)
4936 ui.status(_(b'sending %s command\n') % command)
4904
4937
4905 if b'PUSHFILE' in args:
4938 if b'PUSHFILE' in args:
4906 with open(args[b'PUSHFILE'], 'rb') as fh:
4939 with open(args[b'PUSHFILE'], 'rb') as fh:
4907 del args[b'PUSHFILE']
4940 del args[b'PUSHFILE']
4908 res, output = peer._callpush(
4941 res, output = peer._callpush(
4909 command, fh, **pycompat.strkwargs(args)
4942 command, fh, **pycompat.strkwargs(args)
4910 )
4943 )
4911 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4944 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4912 ui.status(
4945 ui.status(
4913 _(b'remote output: %s\n') % stringutil.escapestr(output)
4946 _(b'remote output: %s\n') % stringutil.escapestr(output)
4914 )
4947 )
4915 else:
4948 else:
4916 with peer.commandexecutor() as e:
4949 with peer.commandexecutor() as e:
4917 res = e.callcommand(command, args).result()
4950 res = e.callcommand(command, args).result()
4918
4951
4919 ui.status(
4952 ui.status(
4920 _(b'response: %s\n')
4953 _(b'response: %s\n')
4921 % stringutil.pprint(res, bprefix=True, indent=2)
4954 % stringutil.pprint(res, bprefix=True, indent=2)
4922 )
4955 )
4923
4956
4924 elif action == b'batchbegin':
4957 elif action == b'batchbegin':
4925 if batchedcommands is not None:
4958 if batchedcommands is not None:
4926 raise error.Abort(_(b'nested batchbegin not allowed'))
4959 raise error.Abort(_(b'nested batchbegin not allowed'))
4927
4960
4928 batchedcommands = []
4961 batchedcommands = []
4929 elif action == b'batchsubmit':
4962 elif action == b'batchsubmit':
4930 # There is a batching API we could go through. But it would be
4963 # There is a batching API we could go through. But it would be
4931 # difficult to normalize requests into function calls. It is easier
4964 # difficult to normalize requests into function calls. It is easier
4932 # to bypass this layer and normalize to commands + args.
4965 # to bypass this layer and normalize to commands + args.
4933 ui.status(
4966 ui.status(
4934 _(b'sending batch with %d sub-commands\n')
4967 _(b'sending batch with %d sub-commands\n')
4935 % len(batchedcommands)
4968 % len(batchedcommands)
4936 )
4969 )
4937 assert peer is not None
4970 assert peer is not None
4938 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4971 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4939 ui.status(
4972 ui.status(
4940 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4973 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4941 )
4974 )
4942
4975
4943 batchedcommands = None
4976 batchedcommands = None
4944
4977
4945 elif action.startswith(b'httprequest '):
4978 elif action.startswith(b'httprequest '):
4946 if not opener:
4979 if not opener:
4947 raise error.Abort(
4980 raise error.Abort(
4948 _(b'cannot use httprequest without an HTTP peer')
4981 _(b'cannot use httprequest without an HTTP peer')
4949 )
4982 )
4950
4983
4951 request = action.split(b' ', 2)
4984 request = action.split(b' ', 2)
4952 if len(request) != 3:
4985 if len(request) != 3:
4953 raise error.Abort(
4986 raise error.Abort(
4954 _(
4987 _(
4955 b'invalid httprequest: expected format is '
4988 b'invalid httprequest: expected format is '
4956 b'"httprequest <method> <path>'
4989 b'"httprequest <method> <path>'
4957 )
4990 )
4958 )
4991 )
4959
4992
4960 method, httppath = request[1:]
4993 method, httppath = request[1:]
4961 headers = {}
4994 headers = {}
4962 body = None
4995 body = None
4963 frames = []
4996 frames = []
4964 for line in lines:
4997 for line in lines:
4965 line = line.lstrip()
4998 line = line.lstrip()
4966 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4999 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4967 if m:
5000 if m:
4968 # Headers need to use native strings.
5001 # Headers need to use native strings.
4969 key = pycompat.strurl(m.group(1))
5002 key = pycompat.strurl(m.group(1))
4970 value = pycompat.strurl(m.group(2))
5003 value = pycompat.strurl(m.group(2))
4971 headers[key] = value
5004 headers[key] = value
4972 continue
5005 continue
4973
5006
4974 if line.startswith(b'BODYFILE '):
5007 if line.startswith(b'BODYFILE '):
4975 with open(line.split(b' ', 1)[1], b'rb') as fh:
5008 with open(line.split(b' ', 1)[1], b'rb') as fh:
4976 body = fh.read()
5009 body = fh.read()
4977 elif line.startswith(b'frame '):
5010 elif line.startswith(b'frame '):
4978 frame = wireprotoframing.makeframefromhumanstring(
5011 frame = wireprotoframing.makeframefromhumanstring(
4979 line[len(b'frame ') :]
5012 line[len(b'frame ') :]
4980 )
5013 )
4981
5014
4982 frames.append(frame)
5015 frames.append(frame)
4983 else:
5016 else:
4984 raise error.Abort(
5017 raise error.Abort(
4985 _(b'unknown argument to httprequest: %s') % line
5018 _(b'unknown argument to httprequest: %s') % line
4986 )
5019 )
4987
5020
4988 url = path + httppath
5021 url = path + httppath
4989
5022
4990 if frames:
5023 if frames:
4991 body = b''.join(bytes(f) for f in frames)
5024 body = b''.join(bytes(f) for f in frames)
4992
5025
4993 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
5026 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4994
5027
4995 # urllib.Request insists on using has_data() as a proxy for
5028 # urllib.Request insists on using has_data() as a proxy for
4996 # determining the request method. Override that to use our
5029 # determining the request method. Override that to use our
4997 # explicitly requested method.
5030 # explicitly requested method.
4998 req.get_method = lambda: pycompat.sysstr(method)
5031 req.get_method = lambda: pycompat.sysstr(method)
4999
5032
5000 try:
5033 try:
5001 res = opener.open(req)
5034 res = opener.open(req)
5002 body = res.read()
5035 body = res.read()
5003 except util.urlerr.urlerror as e:
5036 except util.urlerr.urlerror as e:
5004 # read() method must be called, but only exists in Python 2
5037 # read() method must be called, but only exists in Python 2
5005 getattr(e, 'read', lambda: None)()
5038 getattr(e, 'read', lambda: None)()
5006 continue
5039 continue
5007
5040
5008 ct = res.headers.get('Content-Type')
5041 ct = res.headers.get('Content-Type')
5009 if ct == 'application/mercurial-cbor':
5042 if ct == 'application/mercurial-cbor':
5010 ui.write(
5043 ui.write(
5011 _(b'cbor> %s\n')
5044 _(b'cbor> %s\n')
5012 % stringutil.pprint(
5045 % stringutil.pprint(
5013 cborutil.decodeall(body), bprefix=True, indent=2
5046 cborutil.decodeall(body), bprefix=True, indent=2
5014 )
5047 )
5015 )
5048 )
5016
5049
5017 elif action == b'close':
5050 elif action == b'close':
5018 assert peer is not None
5051 assert peer is not None
5019 peer.close()
5052 peer.close()
5020 elif action == b'readavailable':
5053 elif action == b'readavailable':
5021 if not stdout or not stderr:
5054 if not stdout or not stderr:
5022 raise error.Abort(
5055 raise error.Abort(
5023 _(b'readavailable not available on this peer')
5056 _(b'readavailable not available on this peer')
5024 )
5057 )
5025
5058
5026 stdin.close()
5059 stdin.close()
5027 stdout.read()
5060 stdout.read()
5028 stderr.read()
5061 stderr.read()
5029
5062
5030 elif action == b'readline':
5063 elif action == b'readline':
5031 if not stdout:
5064 if not stdout:
5032 raise error.Abort(_(b'readline not available on this peer'))
5065 raise error.Abort(_(b'readline not available on this peer'))
5033 stdout.readline()
5066 stdout.readline()
5034 elif action == b'ereadline':
5067 elif action == b'ereadline':
5035 if not stderr:
5068 if not stderr:
5036 raise error.Abort(_(b'ereadline not available on this peer'))
5069 raise error.Abort(_(b'ereadline not available on this peer'))
5037 stderr.readline()
5070 stderr.readline()
5038 elif action.startswith(b'read '):
5071 elif action.startswith(b'read '):
5039 count = int(action.split(b' ', 1)[1])
5072 count = int(action.split(b' ', 1)[1])
5040 if not stdout:
5073 if not stdout:
5041 raise error.Abort(_(b'read not available on this peer'))
5074 raise error.Abort(_(b'read not available on this peer'))
5042 stdout.read(count)
5075 stdout.read(count)
5043 elif action.startswith(b'eread '):
5076 elif action.startswith(b'eread '):
5044 count = int(action.split(b' ', 1)[1])
5077 count = int(action.split(b' ', 1)[1])
5045 if not stderr:
5078 if not stderr:
5046 raise error.Abort(_(b'eread not available on this peer'))
5079 raise error.Abort(_(b'eread not available on this peer'))
5047 stderr.read(count)
5080 stderr.read(count)
5048 else:
5081 else:
5049 raise error.Abort(_(b'unknown action: %s') % action)
5082 raise error.Abort(_(b'unknown action: %s') % action)
5050
5083
5051 if batchedcommands is not None:
5084 if batchedcommands is not None:
5052 raise error.Abort(_(b'unclosed "batchbegin" request'))
5085 raise error.Abort(_(b'unclosed "batchbegin" request'))
5053
5086
5054 if peer:
5087 if peer:
5055 peer.close()
5088 peer.close()
5056
5089
5057 if proc:
5090 if proc:
5058 proc.kill()
5091 proc.kill()
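The loop above drives `hg debugwireproto` from a small action script: `command NAME` followed by indented `key value` argument lines (a value prefixed with `eval:` is parsed as a Python literal, anything else is unescaped), `batchbegin`/`batchsubmit` to group several commands into one batch, and low-level actions such as `raw`, `flush`, `readline`, `readavailable`, `httprequest` and `close`. A minimal usage sketch in the style of Mercurial's own tests, run from inside a repository; the `heads`/`listkeys` commands and the `bookmarks` namespace are only illustrative here, and the command output is omitted:

    $ hg debugwireproto --localssh << EOF
    > command heads
    > batchbegin
    > command listkeys
    >     namespace bookmarks
    > batchsubmit
    > close
    > EOF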
@@ -1,449 +1,449 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 abort
3 abort
4 add
4 add
5 addremove
5 addremove
6 annotate
6 annotate
7 archive
7 archive
8 backout
8 backout
9 bisect
9 bisect
10 bookmarks
10 bookmarks
11 branch
11 branch
12 branches
12 branches
13 bundle
13 bundle
14 cat
14 cat
15 clone
15 clone
16 commit
16 commit
17 config
17 config
18 continue
18 continue
19 copy
19 copy
20 diff
20 diff
21 export
21 export
22 files
22 files
23 forget
23 forget
24 graft
24 graft
25 grep
25 grep
26 heads
26 heads
27 help
27 help
28 identify
28 identify
29 import
29 import
30 incoming
30 incoming
31 init
31 init
32 locate
32 locate
33 log
33 log
34 manifest
34 manifest
35 merge
35 merge
36 outgoing
36 outgoing
37 parents
37 parents
38 paths
38 paths
39 phase
39 phase
40 pull
40 pull
41 purge
41 purge
42 push
42 push
43 recover
43 recover
44 remove
44 remove
45 rename
45 rename
46 resolve
46 resolve
47 revert
47 revert
48 rollback
48 rollback
49 root
49 root
50 serve
50 serve
51 shelve
51 shelve
52 status
52 status
53 summary
53 summary
54 tag
54 tag
55 tags
55 tags
56 tip
56 tip
57 unbundle
57 unbundle
58 unshelve
58 unshelve
59 update
59 update
60 verify
60 verify
61 version
61 version
62
62
63 Show all commands that start with "a"
63 Show all commands that start with "a"
64 $ hg debugcomplete a
64 $ hg debugcomplete a
65 abort
65 abort
66 add
66 add
67 addremove
67 addremove
68 annotate
68 annotate
69 archive
69 archive
70
70
71 Do not show debug commands if there are other candidates
71 Do not show debug commands if there are other candidates
72 $ hg debugcomplete d
72 $ hg debugcomplete d
73 diff
73 diff
74
74
75 Show debug commands if there are no other candidates
75 Show debug commands if there are no other candidates
76 $ hg debugcomplete debug
76 $ hg debugcomplete debug
77 debug-delta-find
77 debug-delta-find
78 debug-repair-issue6528
78 debug-repair-issue6528
79 debug-revlog-index
79 debug-revlog-index
80 debugancestor
80 debugancestor
81 debugantivirusrunning
81 debugantivirusrunning
82 debugapplystreamclonebundle
82 debugapplystreamclonebundle
83 debugbackupbundle
83 debugbackupbundle
84 debugbuilddag
84 debugbuilddag
85 debugbundle
85 debugbundle
86 debugcapabilities
86 debugcapabilities
87 debugchangedfiles
87 debugchangedfiles
88 debugcheckstate
88 debugcheckstate
89 debugcolor
89 debugcolor
90 debugcommands
90 debugcommands
91 debugcomplete
91 debugcomplete
92 debugconfig
92 debugconfig
93 debugcreatestreamclonebundle
93 debugcreatestreamclonebundle
94 debugdag
94 debugdag
95 debugdata
95 debugdata
96 debugdate
96 debugdate
97 debugdeltachain
97 debugdeltachain
98 debugdirstate
98 debugdirstate
99 debugdirstateignorepatternshash
99 debugdirstateignorepatternshash
100 debugdiscovery
100 debugdiscovery
101 debugdownload
101 debugdownload
102 debugextensions
102 debugextensions
103 debugfileset
103 debugfileset
104 debugformat
104 debugformat
105 debugfsinfo
105 debugfsinfo
106 debuggetbundle
106 debuggetbundle
107 debugignore
107 debugignore
108 debugindexdot
108 debugindexdot
109 debugindexstats
109 debugindexstats
110 debuginstall
110 debuginstall
111 debugknown
111 debugknown
112 debuglabelcomplete
112 debuglabelcomplete
113 debuglocks
113 debuglocks
114 debugmanifestfulltextcache
114 debugmanifestfulltextcache
115 debugmergestate
115 debugmergestate
116 debugnamecomplete
116 debugnamecomplete
117 debugnodemap
117 debugnodemap
118 debugobsolete
118 debugobsolete
119 debugp1copies
119 debugp1copies
120 debugp2copies
120 debugp2copies
121 debugpathcomplete
121 debugpathcomplete
122 debugpathcopies
122 debugpathcopies
123 debugpeer
123 debugpeer
124 debugpickmergetool
124 debugpickmergetool
125 debugpushkey
125 debugpushkey
126 debugpvec
126 debugpvec
127 debugrebuilddirstate
127 debugrebuilddirstate
128 debugrebuildfncache
128 debugrebuildfncache
129 debugrename
129 debugrename
130 debugrequires
130 debugrequires
131 debugrevlog
131 debugrevlog
132 debugrevlogindex
132 debugrevlogindex
133 debugrevspec
133 debugrevspec
134 debugserve
134 debugserve
135 debugsetparents
135 debugsetparents
136 debugshell
136 debugshell
137 debugsidedata
137 debugsidedata
138 debugssl
138 debugssl
139 debugstrip
139 debugstrip
140 debugsub
140 debugsub
141 debugsuccessorssets
141 debugsuccessorssets
142 debugtagscache
142 debugtagscache
143 debugtemplate
143 debugtemplate
144 debuguigetpass
144 debuguigetpass
145 debuguiprompt
145 debuguiprompt
146 debugupdatecaches
146 debugupdatecaches
147 debugupgraderepo
147 debugupgraderepo
148 debugwalk
148 debugwalk
149 debugwhyunstable
149 debugwhyunstable
150 debugwireargs
150 debugwireargs
151 debugwireproto
151 debugwireproto
152
152
153 Do not show the alias of a debug command if there are other candidates
153 Do not show the alias of a debug command if there are other candidates
154 (this should hide rawcommit)
154 (this should hide rawcommit)
155 $ hg debugcomplete r
155 $ hg debugcomplete r
156 recover
156 recover
157 remove
157 remove
158 rename
158 rename
159 resolve
159 resolve
160 revert
160 revert
161 rollback
161 rollback
162 root
162 root
163 Show the alias of a debug command if there are no other candidates
163 Show the alias of a debug command if there are no other candidates
164 $ hg debugcomplete rawc
164 $ hg debugcomplete rawc
165
165
166
166
167 Show the global options
167 Show the global options
168 $ hg debugcomplete --options | sort
168 $ hg debugcomplete --options | sort
169 --color
169 --color
170 --config
170 --config
171 --cwd
171 --cwd
172 --debug
172 --debug
173 --debugger
173 --debugger
174 --encoding
174 --encoding
175 --encodingmode
175 --encodingmode
176 --help
176 --help
177 --hidden
177 --hidden
178 --noninteractive
178 --noninteractive
179 --pager
179 --pager
180 --profile
180 --profile
181 --quiet
181 --quiet
182 --repository
182 --repository
183 --time
183 --time
184 --traceback
184 --traceback
185 --verbose
185 --verbose
186 --version
186 --version
187 -R
187 -R
188 -h
188 -h
189 -q
189 -q
190 -v
190 -v
191 -y
191 -y
192
192
193 Show the options for the "serve" command
193 Show the options for the "serve" command
194 $ hg debugcomplete --options serve | sort
194 $ hg debugcomplete --options serve | sort
195 --accesslog
195 --accesslog
196 --address
196 --address
197 --certificate
197 --certificate
198 --cmdserver
198 --cmdserver
199 --color
199 --color
200 --config
200 --config
201 --cwd
201 --cwd
202 --daemon
202 --daemon
203 --daemon-postexec
203 --daemon-postexec
204 --debug
204 --debug
205 --debugger
205 --debugger
206 --encoding
206 --encoding
207 --encodingmode
207 --encodingmode
208 --errorlog
208 --errorlog
209 --help
209 --help
210 --hidden
210 --hidden
211 --ipv6
211 --ipv6
212 --name
212 --name
213 --noninteractive
213 --noninteractive
214 --pager
214 --pager
215 --pid-file
215 --pid-file
216 --port
216 --port
217 --prefix
217 --prefix
218 --print-url
218 --print-url
219 --profile
219 --profile
220 --quiet
220 --quiet
221 --repository
221 --repository
222 --stdio
222 --stdio
223 --style
223 --style
224 --subrepos
224 --subrepos
225 --templates
225 --templates
226 --time
226 --time
227 --traceback
227 --traceback
228 --verbose
228 --verbose
229 --version
229 --version
230 --web-conf
230 --web-conf
231 -6
231 -6
232 -A
232 -A
233 -E
233 -E
234 -R
234 -R
235 -S
235 -S
236 -a
236 -a
237 -d
237 -d
238 -h
238 -h
239 -n
239 -n
240 -p
240 -p
241 -q
241 -q
242 -t
242 -t
243 -v
243 -v
244 -y
244 -y
245
245
246 Show an error if we use --options with an ambiguous abbreviation
246 Show an error if we use --options with an ambiguous abbreviation
247 $ hg debugcomplete --options s
247 $ hg debugcomplete --options s
248 hg: command 's' is ambiguous:
248 hg: command 's' is ambiguous:
249 serve shelve showconfig status summary
249 serve shelve showconfig status summary
250 [10]
250 [10]
251
251
252 Show all commands + options
252 Show all commands + options
253 $ hg debugcommands
253 $ hg debugcommands
254 abort: dry-run
254 abort: dry-run
255 add: include, exclude, subrepos, dry-run
255 add: include, exclude, subrepos, dry-run
256 addremove: similarity, subrepos, include, exclude, dry-run
256 addremove: similarity, subrepos, include, exclude, dry-run
257 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
258 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 archive: no-decode, prefix, rev, type, subrepos, include, exclude
259 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
260 bisect: reset, good, bad, skip, extend, command, noupdate
260 bisect: reset, good, bad, skip, extend, command, noupdate
261 bookmarks: force, rev, delete, rename, inactive, list, template
261 bookmarks: force, rev, delete, rename, inactive, list, template
262 branch: force, clean, rev
262 branch: force, clean, rev
263 branches: active, closed, rev, template
263 branches: active, closed, rev, template
264 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
265 cat: output, rev, decode, include, exclude, template
265 cat: output, rev, decode, include, exclude, template
266 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
267 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
268 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
269 continue: dry-run
269 continue: dry-run
270 copy: forget, after, at-rev, force, include, exclude, dry-run
270 copy: forget, after, at-rev, force, include, exclude, dry-run
271 debug-delta-find: changelog, manifest, dir, template
271 debug-delta-find: changelog, manifest, dir, template, source
272 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
272 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
273 debug-revlog-index: changelog, manifest, dir, template
273 debug-revlog-index: changelog, manifest, dir, template
274 debugancestor:
274 debugancestor:
275 debugantivirusrunning:
275 debugantivirusrunning:
276 debugapplystreamclonebundle:
276 debugapplystreamclonebundle:
277 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
277 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
278 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
278 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
279 debugbundle: all, part-type, spec
279 debugbundle: all, part-type, spec
280 debugcapabilities:
280 debugcapabilities:
281 debugchangedfiles: compute
281 debugchangedfiles: compute
282 debugcheckstate:
282 debugcheckstate:
283 debugcolor: style
283 debugcolor: style
284 debugcommands:
284 debugcommands:
285 debugcomplete: options
285 debugcomplete: options
286 debugcreatestreamclonebundle:
286 debugcreatestreamclonebundle:
287 debugdag: tags, branches, dots, spaces
287 debugdag: tags, branches, dots, spaces
288 debugdata: changelog, manifest, dir
288 debugdata: changelog, manifest, dir
289 debugdate: extended
289 debugdate: extended
290 debugdeltachain: changelog, manifest, dir, template
290 debugdeltachain: changelog, manifest, dir, template
291 debugdirstateignorepatternshash:
291 debugdirstateignorepatternshash:
292 debugdirstate: nodates, dates, datesort, docket, all
292 debugdirstate: nodates, dates, datesort, docket, all
293 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
293 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
294 debugdownload: output
294 debugdownload: output
295 debugextensions: template
295 debugextensions: template
296 debugfileset: rev, all-files, show-matcher, show-stage
296 debugfileset: rev, all-files, show-matcher, show-stage
297 debugformat: template
297 debugformat: template
298 debugfsinfo:
298 debugfsinfo:
299 debuggetbundle: head, common, type
299 debuggetbundle: head, common, type
300 debugignore:
300 debugignore:
301 debugindexdot: changelog, manifest, dir
301 debugindexdot: changelog, manifest, dir
302 debugindexstats:
302 debugindexstats:
303 debuginstall: template
303 debuginstall: template
304 debugknown:
304 debugknown:
305 debuglabelcomplete:
305 debuglabelcomplete:
306 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
306 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
307 debugmanifestfulltextcache: clear, add
307 debugmanifestfulltextcache: clear, add
308 debugmergestate: style, template
308 debugmergestate: style, template
309 debugnamecomplete:
309 debugnamecomplete:
310 debugnodemap: dump-new, dump-disk, check, metadata
310 debugnodemap: dump-new, dump-disk, check, metadata
311 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
311 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
312 debugp1copies: rev
312 debugp1copies: rev
313 debugp2copies: rev
313 debugp2copies: rev
314 debugpathcomplete: full, normal, added, removed
314 debugpathcomplete: full, normal, added, removed
315 debugpathcopies: include, exclude
315 debugpathcopies: include, exclude
316 debugpeer:
316 debugpeer:
317 debugpickmergetool: rev, changedelete, include, exclude, tool
317 debugpickmergetool: rev, changedelete, include, exclude, tool
318 debugpushkey:
318 debugpushkey:
319 debugpvec:
319 debugpvec:
320 debugrebuilddirstate: rev, minimal
320 debugrebuilddirstate: rev, minimal
321 debugrebuildfncache: only-data
321 debugrebuildfncache: only-data
322 debugrename: rev
322 debugrename: rev
323 debugrequires:
323 debugrequires:
324 debugrevlog: changelog, manifest, dir, dump
324 debugrevlog: changelog, manifest, dir, dump
325 debugrevlogindex: changelog, manifest, dir, format
325 debugrevlogindex: changelog, manifest, dir, format
326 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
326 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
327 debugserve: sshstdio, logiofd, logiofile
327 debugserve: sshstdio, logiofd, logiofile
328 debugsetparents:
328 debugsetparents:
329 debugshell:
329 debugshell:
330 debugsidedata: changelog, manifest, dir
330 debugsidedata: changelog, manifest, dir
331 debugssl:
331 debugssl:
332 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
332 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
333 debugsub: rev
333 debugsub: rev
334 debugsuccessorssets: closest
334 debugsuccessorssets: closest
335 debugtagscache:
335 debugtagscache:
336 debugtemplate: rev, define
336 debugtemplate: rev, define
337 debuguigetpass: prompt
337 debuguigetpass: prompt
338 debuguiprompt: prompt
338 debuguiprompt: prompt
339 debugupdatecaches:
339 debugupdatecaches:
340 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
340 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
341 debugwalk: include, exclude
341 debugwalk: include, exclude
342 debugwhyunstable:
342 debugwhyunstable:
343 debugwireargs: three, four, five, ssh, remotecmd, insecure
343 debugwireargs: three, four, five, ssh, remotecmd, insecure
344 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
344 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
345 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
345 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
346 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
346 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
347 files: rev, print0, include, exclude, template, subrepos
347 files: rev, print0, include, exclude, template, subrepos
348 forget: interactive, include, exclude, dry-run
348 forget: interactive, include, exclude, dry-run
349 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
349 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
350 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
350 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
351 heads: rev, topo, active, closed, style, template
351 heads: rev, topo, active, closed, style, template
352 help: extension, command, keyword, system
352 help: extension, command, keyword, system
353 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
353 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
354 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
354 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
355 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
355 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
356 init: ssh, remotecmd, insecure
356 init: ssh, remotecmd, insecure
357 locate: rev, print0, fullpath, include, exclude
357 locate: rev, print0, fullpath, include, exclude
358 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
358 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
359 manifest: rev, all, template
359 manifest: rev, all, template
360 merge: force, rev, preview, abort, tool
360 merge: force, rev, preview, abort, tool
361 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
361 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
362 parents: rev, style, template
362 parents: rev, style, template
363 paths: template
363 paths: template
364 phase: public, draft, secret, force, rev
364 phase: public, draft, secret, force, rev
365 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
365 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
366 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
366 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
367 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
367 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
368 recover: verify
368 recover: verify
369 remove: after, force, subrepos, include, exclude, dry-run
369 remove: after, force, subrepos, include, exclude, dry-run
370 rename: forget, after, at-rev, force, include, exclude, dry-run
370 rename: forget, after, at-rev, force, include, exclude, dry-run
371 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
371 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
372 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
372 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
373 rollback: dry-run, force
373 rollback: dry-run, force
374 root: template
374 root: template
375 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
375 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
376 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
376 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
377 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
377 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
378 summary: remote
378 summary: remote
379 tag: force, local, rev, remove, edit, message, date, user
379 tag: force, local, rev, remove, edit, message, date, user
380 tags: template
380 tags: template
381 tip: patch, git, style, template
381 tip: patch, git, style, template
382 unbundle: update
382 unbundle: update
383 unshelve: abort, continue, interactive, keep, name, tool, date
383 unshelve: abort, continue, interactive, keep, name, tool, date
384 update: clean, check, merge, date, rev, tool
384 update: clean, check, merge, date, rev, tool
385 verify: full
385 verify: full
386 version: template
386 version: template
387
387
388 $ hg init a
388 $ hg init a
389 $ cd a
389 $ cd a
390 $ echo fee > fee
390 $ echo fee > fee
391 $ hg ci -q -Amfee
391 $ hg ci -q -Amfee
392 $ hg tag fee
392 $ hg tag fee
393 $ mkdir fie
393 $ mkdir fie
394 $ echo dead > fie/dead
394 $ echo dead > fie/dead
395 $ echo live > fie/live
395 $ echo live > fie/live
396 $ hg bookmark fo
396 $ hg bookmark fo
397 $ hg branch -q fie
397 $ hg branch -q fie
398 $ hg ci -q -Amfie
398 $ hg ci -q -Amfie
399 $ echo fo > fo
399 $ echo fo > fo
400 $ hg branch -qf default
400 $ hg branch -qf default
401 $ hg ci -q -Amfo
401 $ hg ci -q -Amfo
402 $ echo Fum > Fum
402 $ echo Fum > Fum
403 $ hg ci -q -AmFum
403 $ hg ci -q -AmFum
404 $ hg bookmark Fum
404 $ hg bookmark Fum
405
405
406 Test debugpathcomplete
406 Test debugpathcomplete
407
407
408 $ hg debugpathcomplete f
408 $ hg debugpathcomplete f
409 fee
409 fee
410 fie
410 fie
411 fo
411 fo
412 $ hg debugpathcomplete -f f
412 $ hg debugpathcomplete -f f
413 fee
413 fee
414 fie/dead
414 fie/dead
415 fie/live
415 fie/live
416 fo
416 fo
417
417
418 $ hg rm Fum
418 $ hg rm Fum
419 $ hg debugpathcomplete -r F
419 $ hg debugpathcomplete -r F
420 Fum
420 Fum
421
421
422 Test debugnamecomplete
422 Test debugnamecomplete
423
423
424 $ hg debugnamecomplete
424 $ hg debugnamecomplete
425 Fum
425 Fum
426 default
426 default
427 fee
427 fee
428 fie
428 fie
429 fo
429 fo
430 tip
430 tip
431 $ hg debugnamecomplete f
431 $ hg debugnamecomplete f
432 fee
432 fee
433 fie
433 fie
434 fo
434 fo
435
435
436 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
436 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
437 used for completions in some shells.
437 used for completions in some shells.
438
438
439 $ hg debuglabelcomplete
439 $ hg debuglabelcomplete
440 Fum
440 Fum
441 default
441 default
442 fee
442 fee
443 fie
443 fie
444 fo
444 fo
445 tip
445 tip
446 $ hg debuglabelcomplete f
446 $ hg debuglabelcomplete f
447 fee
447 fee
448 fie
448 fie
449 fo
449 fo
@@ -1,194 +1,346 b''
1 ====================================
1 ====================================
2 Test delta choice with sparse revlog
2 Test delta choice with sparse revlog
3 ====================================
3 ====================================
4
4
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
6 to generate an appropriate file, so we test with a single file instead. The
6 to generate an appropriate file, so we test with a single file instead. The
7 goal is to observe intermediate snapshots being created.
7 goal is to observe intermediate snapshots being created.
8
8
9 We need a large enough file. Part of the content needs to be replaced
9 We need a large enough file. Part of the content needs to be replaced
10 repeatedly while some of it changes rarely.
10 repeatedly while some of it changes rarely.
11
11
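As a rough illustration of the shape of history this test needs, here is a minimal Python sketch that builds per-revision file content with a stable region and a churning region. The actual data comes from the generate-churning-bundle.py script used below; the sizes and line formats in this sketch are assumptions, not the real generator:

    import random

    STABLE_LINES = 10000  # rarely-changing part of the file (assumed size)
    CHURN_LINES = 500     # part rewritten on every revision (assumed size)

    def revision_content(rev, rng):
        # Stable region: identical in every revision, so deltas stay small here.
        stable = [b"stable line %d\n" % i for i in range(STABLE_LINES)]
        # Churning region: regenerated for each revision, forcing real deltas
        # and, over many revisions, intermediate snapshots in a sparse revlog.
        churn = [
            b"churn rev=%d nonce=%d\n" % (rev, rng.randrange(1 << 30))
            for _ in range(CHURN_LINES)
        ]
        return b"".join(stable + churn)

    rng = random.Random(0)  # deterministic, so runs are reproducible
    contents = [revision_content(rev, rng) for rev in range(5)]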
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
13
13
14 $ expectedhash=`cat "$bundlepath".md5`
14 $ expectedhash=`cat "$bundlepath".md5`
15
15
16 #if slow
16 #if slow
17
17
18 $ if [ ! -f "$bundlepath" ]; then
18 $ if [ ! -f "$bundlepath" ]; then
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
20 > fi
20 > fi
21
21
22 #else
22 #else
23
23
24 $ if [ ! -f "$bundlepath" ]; then
24 $ if [ ! -f "$bundlepath" ]; then
25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
26 > exit 80
26 > exit 80
27 > fi
27 > fi
28
28
29 #endif
29 #endif
30
30
31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
32 $ if [ "$currenthash" != "$expectedhash" ]; then
32 $ if [ "$currenthash" != "$expectedhash" ]; then
33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
34 > exit 80
34 > exit 80
35 > fi
35 > fi
36
36
37 $ cat >> $HGRCPATH << EOF
37 $ cat >> $HGRCPATH << EOF
38 > [format]
38 > [format]
39 > sparse-revlog = yes
39 > sparse-revlog = yes
40 > maxchainlen = 15
40 > maxchainlen = 15
41 > [storage]
41 > [storage]
42 > revlog.optimize-delta-parent-choice = yes
42 > revlog.optimize-delta-parent-choice = yes
43 > revlog.reuse-external-delta = no
43 > revlog.reuse-external-delta = no
44 > EOF
44 > EOF
45 $ hg init sparse-repo
45 $ hg init sparse-repo
46 $ cd sparse-repo
46 $ cd sparse-repo
47 $ hg unbundle $bundlepath
47 $ hg unbundle $bundlepath
48 adding changesets
48 adding changesets
49 adding manifests
49 adding manifests
50 adding file changes
50 adding file changes
51 added 5001 changesets with 5001 changes to 1 files (+89 heads)
51 added 5001 changesets with 5001 changes to 1 files (+89 heads)
52 new changesets 9706f5af64f4:d9032adc8114 (5001 drafts)
52 new changesets 9706f5af64f4:d9032adc8114 (5001 drafts)
53 (run 'hg heads' to see heads, 'hg merge' to merge)
53 (run 'hg heads' to see heads, 'hg merge' to merge)
54 $ hg up
54 $ hg up
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 updated to "d9032adc8114: commit #5000"
56 updated to "d9032adc8114: commit #5000"
57 89 other heads for branch "default"
57 89 other heads for branch "default"
58
58
59 $ hg log --stat -r 0:3
59 $ hg log --stat -r 0:3
60 changeset: 0:9706f5af64f4
60 changeset: 0:9706f5af64f4
61 user: test
61 user: test
62 date: Thu Jan 01 00:00:00 1970 +0000
62 date: Thu Jan 01 00:00:00 1970 +0000
63 summary: initial commit
63 summary: initial commit
64
64
65 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
65 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
66 1 files changed, 10500 insertions(+), 0 deletions(-)
66 1 files changed, 10500 insertions(+), 0 deletions(-)
67
67
68 changeset: 1:724907deaa5e
68 changeset: 1:724907deaa5e
69 user: test
69 user: test
70 date: Thu Jan 01 00:00:00 1970 +0000
70 date: Thu Jan 01 00:00:00 1970 +0000
71 summary: commit #1
71 summary: commit #1
72
72
73 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
73 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
74 1 files changed, 534 insertions(+), 534 deletions(-)
74 1 files changed, 534 insertions(+), 534 deletions(-)
75
75
76 changeset: 2:62c41bce3e5d
76 changeset: 2:62c41bce3e5d
77 user: test
77 user: test
78 date: Thu Jan 01 00:00:00 1970 +0000
78 date: Thu Jan 01 00:00:00 1970 +0000
79 summary: commit #2
79 summary: commit #2
80
80
81 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
81 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
82 1 files changed, 534 insertions(+), 534 deletions(-)
82 1 files changed, 534 insertions(+), 534 deletions(-)
83
83
84 changeset: 3:348a9cbd6959
84 changeset: 3:348a9cbd6959
85 user: test
85 user: test
86 date: Thu Jan 01 00:00:00 1970 +0000
86 date: Thu Jan 01 00:00:00 1970 +0000
87 summary: commit #3
87 summary: commit #3
88
88
89 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
89 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
90 1 files changed, 534 insertions(+), 534 deletions(-)
90 1 files changed, 534 insertions(+), 534 deletions(-)
91
91
92
92
93 $ f -s .hg/store/data/*.d
93 $ f -s .hg/store/data/*.d
94 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
94 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
95 $ hg debugrevlog *
95 $ hg debugrevlog *
96 format : 1
96 format : 1
97 flags : generaldelta
97 flags : generaldelta
98
98
99 revisions : 5001
99 revisions : 5001
100 merges : 625 (12.50%)
100 merges : 625 (12.50%)
101 normal : 4376 (87.50%)
101 normal : 4376 (87.50%)
102 revisions : 5001
102 revisions : 5001
103 empty : 0 ( 0.00%)
103 empty : 0 ( 0.00%)
104 text : 0 (100.00%)
104 text : 0 (100.00%)
105 delta : 0 (100.00%)
105 delta : 0 (100.00%)
106 snapshot : 383 ( 7.66%)
106 snapshot : 383 ( 7.66%)
107 lvl-0 : 3 ( 0.06%)
107 lvl-0 : 3 ( 0.06%)
108 lvl-1 : 18 ( 0.36%)
108 lvl-1 : 18 ( 0.36%)
109 lvl-2 : 62 ( 1.24%)
109 lvl-2 : 62 ( 1.24%)
110 lvl-3 : 108 ( 2.16%)
110 lvl-3 : 108 ( 2.16%)
111 lvl-4 : 191 ( 3.82%)
111 lvl-4 : 191 ( 3.82%)
112 lvl-5 : 1 ( 0.02%)
112 lvl-5 : 1 ( 0.02%)
113 deltas : 4618 (92.34%)
113 deltas : 4618 (92.34%)
114 revision size : 58616973
114 revision size : 58616973
115 snapshot : 9247844 (15.78%)
115 snapshot : 9247844 (15.78%)
116 lvl-0 : 539532 ( 0.92%)
116 lvl-0 : 539532 ( 0.92%)
117 lvl-1 : 1467743 ( 2.50%)
117 lvl-1 : 1467743 ( 2.50%)
118 lvl-2 : 1873820 ( 3.20%)
118 lvl-2 : 1873820 ( 3.20%)
119 lvl-3 : 2326874 ( 3.97%)
119 lvl-3 : 2326874 ( 3.97%)
120 lvl-4 : 3029118 ( 5.17%)
120 lvl-4 : 3029118 ( 5.17%)
121 lvl-5 : 10757 ( 0.02%)
121 lvl-5 : 10757 ( 0.02%)
122 deltas : 49369129 (84.22%)
122 deltas : 49369129 (84.22%)
123
123
124 chunks : 5001
124 chunks : 5001
125 0x28 : 5001 (100.00%)
125 0x28 : 5001 (100.00%)
126 chunks size : 58616973
126 chunks size : 58616973
127 0x28 : 58616973 (100.00%)
127 0x28 : 58616973 (100.00%)
128
128
129 avg chain length : 9
129 avg chain length : 9
130 max chain length : 15
130 max chain length : 15
131 max chain reach : 27366701
131 max chain reach : 27366701
132 compression ratio : 29
132 compression ratio : 29
133
133
134 uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
134 uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
135 full revision size (min/max/avg) : 179288 / 180786 / 179844
135 full revision size (min/max/avg) : 179288 / 180786 / 179844
136 inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
136 inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
137 level-1 (min/max/avg) : 13905 / 169507 / 81541
137 level-1 (min/max/avg) : 13905 / 169507 / 81541
138 level-2 (min/max/avg) : 10887 / 83873 / 30222
138 level-2 (min/max/avg) : 10887 / 83873 / 30222
139 level-3 (min/max/avg) : 10911 / 43047 / 21545
139 level-3 (min/max/avg) : 10911 / 43047 / 21545
140 level-4 (min/max/avg) : 10838 / 21390 / 15859
140 level-4 (min/max/avg) : 10838 / 21390 / 15859
141 level-5 (min/max/avg) : 10757 / 10757 / 10757
141 level-5 (min/max/avg) : 10757 / 10757 / 10757
142 delta size (min/max/avg) : 9672 / 108072 / 10690
142 delta size (min/max/avg) : 9672 / 108072 / 10690
143
143
144 deltas against prev : 3906 (84.58%)
144 deltas against prev : 3906 (84.58%)
145 where prev = p1 : 3906 (100.00%)
145 where prev = p1 : 3906 (100.00%)
146 where prev = p2 : 0 ( 0.00%)
146 where prev = p2 : 0 ( 0.00%)
147 other : 0 ( 0.00%)
147 other : 0 ( 0.00%)
148 deltas against p1 : 649 (14.05%)
148 deltas against p1 : 649 (14.05%)
149 deltas against p2 : 63 ( 1.36%)
149 deltas against p2 : 63 ( 1.36%)
150 deltas against other : 0 ( 0.00%)
150 deltas against other : 0 ( 0.00%)
151
151
152
152
153 Test `debug-delta-find`
153 Test `debug-delta-find`
154 -----------------------
154 -----------------------
155
155
156 $ ls -1
156 $ ls -1
157 SPARSE-REVLOG-TEST-FILE
157 SPARSE-REVLOG-TEST-FILE
158 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
158 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
159 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
159 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
160 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
160 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
161 DBG-DELTAS-SEARCH: SEARCH rev=4971
161 DBG-DELTAS-SEARCH: SEARCH rev=4971
162 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
162 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
163 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
163 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
164 DBG-DELTAS-SEARCH: type=snapshot-4
164 DBG-DELTAS-SEARCH: type=snapshot-4
165 DBG-DELTAS-SEARCH: size=18296
165 DBG-DELTAS-SEARCH: size=18296
166 DBG-DELTAS-SEARCH: base=4930
166 DBG-DELTAS-SEARCH: base=4930
167 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
167 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
168 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
168 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
169 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
169 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
170 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
170 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
171 DBG-DELTAS-SEARCH: type=snapshot-4
171 DBG-DELTAS-SEARCH: type=snapshot-4
172 DBG-DELTAS-SEARCH: size=19179
172 DBG-DELTAS-SEARCH: size=19179
173 DBG-DELTAS-SEARCH: base=4930
173 DBG-DELTAS-SEARCH: base=4930
174 DBG-DELTAS-SEARCH: TOO-HIGH
174 DBG-DELTAS-SEARCH: TOO-HIGH
175 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
175 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
176 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
176 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
177 DBG-DELTAS-SEARCH: type=snapshot-3
177 DBG-DELTAS-SEARCH: type=snapshot-3
178 DBG-DELTAS-SEARCH: size=39228
178 DBG-DELTAS-SEARCH: size=39228
179 DBG-DELTAS-SEARCH: base=4799
179 DBG-DELTAS-SEARCH: base=4799
180 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
180 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
181 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
181 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
182 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
182 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
183 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
183 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
184 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
184 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
185 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
185 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
186 DBG-DELTAS-SEARCH: type=snapshot-2
186 DBG-DELTAS-SEARCH: type=snapshot-2
187 DBG-DELTAS-SEARCH: size=50213
187 DBG-DELTAS-SEARCH: size=50213
188 DBG-DELTAS-SEARCH: base=4623
188 DBG-DELTAS-SEARCH: base=4623
189 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
189 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
190 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
190 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
191 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
191 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
192 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
192 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
193
193
194 $ cat << EOF >>.hg/hgrc
195 > [storage]
196 > revlog.optimize-delta-parent-choice = no
197 > revlog.reuse-external-delta = yes
198 > EOF
199
200 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
201 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
202 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
203 DBG-DELTAS-SEARCH: SEARCH rev=4971
204 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
205 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
206 DBG-DELTAS-SEARCH: type=snapshot-4
207 DBG-DELTAS-SEARCH: size=18296
208 DBG-DELTAS-SEARCH: base=4930
209 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
210 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
211 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
212 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
213 DBG-DELTAS-SEARCH: type=snapshot-4
214 DBG-DELTAS-SEARCH: size=19179
215 DBG-DELTAS-SEARCH: base=4930
216 DBG-DELTAS-SEARCH: TOO-HIGH
217 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
218 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
219 DBG-DELTAS-SEARCH: type=snapshot-3
220 DBG-DELTAS-SEARCH: size=39228
221 DBG-DELTAS-SEARCH: base=4799
222 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
223 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
224 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
225 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
226 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
227 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
228 DBG-DELTAS-SEARCH: type=snapshot-2
229 DBG-DELTAS-SEARCH: size=50213
230 DBG-DELTAS-SEARCH: base=4623
231 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
232 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
233 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
234 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
235 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
236 DBG-DELTAS-SEARCH: SEARCH rev=4971
237 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
238 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
239 DBG-DELTAS-SEARCH: type=snapshot-3
240 DBG-DELTAS-SEARCH: size=39228
241 DBG-DELTAS-SEARCH: base=4799
242 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
243 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
244 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
245 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
246 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
247 DBG-DELTAS-SEARCH: SEARCH rev=4971
248 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
249 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
250 DBG-DELTAS-SEARCH: type=snapshot-4
251 DBG-DELTAS-SEARCH: size=18296
252 DBG-DELTAS-SEARCH: base=4930
253 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
254 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
255 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
256 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
257 DBG-DELTAS-SEARCH: type=snapshot-4
258 DBG-DELTAS-SEARCH: size=19179
259 DBG-DELTAS-SEARCH: base=4930
260 DBG-DELTAS-SEARCH: TOO-HIGH
261 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
262 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
263 DBG-DELTAS-SEARCH: type=snapshot-3
264 DBG-DELTAS-SEARCH: size=39228
265 DBG-DELTAS-SEARCH: base=4799
266 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
267 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
268 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
269 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
270 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
271 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
272 DBG-DELTAS-SEARCH: type=snapshot-2
273 DBG-DELTAS-SEARCH: size=50213
274 DBG-DELTAS-SEARCH: base=4623
275 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
276 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
277 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
278 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
279 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
280 DBG-DELTAS-SEARCH: SEARCH rev=4971
281 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
282 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
283 DBG-DELTAS-SEARCH: type=snapshot-4
284 DBG-DELTAS-SEARCH: size=18296
285 DBG-DELTAS-SEARCH: base=4930
286 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
287 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
288 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
290 DBG-DELTAS-SEARCH: type=snapshot-4
291 DBG-DELTAS-SEARCH: size=19179
292 DBG-DELTAS-SEARCH: base=4930
293 DBG-DELTAS-SEARCH: TOO-HIGH
294 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
295 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
296 DBG-DELTAS-SEARCH: type=snapshot-3
297 DBG-DELTAS-SEARCH: size=39228
298 DBG-DELTAS-SEARCH: base=4799
299 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
300 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
301 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
302 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
303 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
304 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
305 DBG-DELTAS-SEARCH: type=snapshot-2
306 DBG-DELTAS-SEARCH: size=50213
307 DBG-DELTAS-SEARCH: base=4623
308 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
309 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
310 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
311 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
312 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
313 DBG-DELTAS-SEARCH: SEARCH rev=4971
314 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
316 DBG-DELTAS-SEARCH: type=snapshot-4
317 DBG-DELTAS-SEARCH: size=18296
318 DBG-DELTAS-SEARCH: base=4930
319 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
321 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
322 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
323 DBG-DELTAS-SEARCH: type=snapshot-4
324 DBG-DELTAS-SEARCH: size=19179
325 DBG-DELTAS-SEARCH: base=4930
326 DBG-DELTAS-SEARCH: TOO-HIGH
327 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
328 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
329 DBG-DELTAS-SEARCH: type=snapshot-3
330 DBG-DELTAS-SEARCH: size=39228
331 DBG-DELTAS-SEARCH: base=4799
332 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
333 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
334 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
335 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
336 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
337 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
338 DBG-DELTAS-SEARCH: type=snapshot-2
339 DBG-DELTAS-SEARCH: size=50213
340 DBG-DELTAS-SEARCH: base=4623
341 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
342 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
343 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
344 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
345
194 $ cd ..
346 $ cd ..
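
The runs above exercise each value accepted by the new `--source` flag of `hg debug-delta-find` (`full`, `storage`, `p1`, `p2`, `prev`) against the same file and revision, so the resulting delta searches can be compared directly. A minimal shell sketch of that comparison follows; it simply reuses the file name and revision from this test, assumes it is run inside the test repository (before the `cd ..` above), and is illustrative only, not part of the recorded test output:

  $ for src in full storage p1 p2 prev; do
  >   echo "=== --source $src ==="
  >   hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source "$src"
  > done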