##// END OF EJS Templates
debug-discovery: fix a typo in the doc...
marmoute -
r50294:a3fdc4fc stable
parent child Browse files
Show More
@@ -1,5031 +1,5031 b''
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


import binascii
import codecs
import collections
import contextlib
import difflib
import errno
import glob
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    bundlerepo,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    dirstateutils,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    mergestate as mergestatemod,
    metadata,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    repoview,
    requirements,
    revlog,
    revlogutils,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    strip,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
)
from .interfaces import repository
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
    urlutil,
)

from .revlogutils import (
    constants as revlog_constants,
    debug as revlog_debug,
    deltas as deltautil,
    nodemap,
    rewrite,
    sidedata,
)

release = lockmod.release

# All debug commands (plus `strip`, re-exported here) register into this
# shared command table, which the dispatcher merges with the main one.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # Explicit index file given: operate on a standalone revlog,
        # no repository required.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        # No index given: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141
141
142
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158
158
159
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
166
166
167
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file from
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev "nf*" files from the
                        # second parent so merges keep them all
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the deltas of a changegroup; verbose per-delta header if 'all'"""
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print every delta of the current section with its metadata
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report but do not abort: other bundle parts may still be useful
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
426
427
427
def _quasirepr(thing):
    """return a stable, repr-like bytes rendering of 'thing'

    Dict-like values are rendered with sorted keys so the output is
    deterministic across runs (plain repr() of a dict is insertion-ordered).
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # delegate well-known part types to their dedicated dumpers
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only print the bundlespec, do not dump the contents
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even if dumping failed
        peer.close()
503
503
504
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed information from changelog sidedata, if any
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
554
554
555
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # report every inconsistency before aborting so the user sees them all
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
569
569
570
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
583
583
584
584
def _debugdisplaycolor(ui):
    """print every available color/effect label, rendered in itself"""
    # work on a copy so the real ui's style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out — warn rather than silently leak them
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    formatted = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % formatted)
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A stand-alone revlog index was given on the command line: open it
        # straight from the current working directory (no repository needed,
        # auditing disabled since this is a debug command).
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        # Event stream consumed by dagparser.dagtextlines: one 'n'(ode)
        # event per revision; revisions explicitly listed on the command
        # line additionally get an 'l'(abel) event named "rN".
        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map changelog revision -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        # Event stream for the repository changelog: an 'a'(nnotation)
        # event whenever the branch changes, an 'n'(ode) event per
        # revision, and 'l'(abel) events for tags when --tags is set.
        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # field 5 of a changelog entry is the extras dict;
                    # b'branch' is always present there
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision and no second argument is allowed
    storage_implied = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_implied:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also accepts the more permissive date formats
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
        - base: a full snapshot
        - snap: an intermediate snapshot
        - p1: a delta against the first parent
        - p2: a delta against the second parent
        - skip1: a delta against the same base as p1
          (when p1 has an empty delta)
        - skip2: a delta against the same base as p2
          (when p2 has an empty delta)
        - prev: a delta against the previous revision
        - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics: parents, compressed and
        # uncompressed sizes, the classified delta type, the full delta
        # chain and the chain's total compressed size.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    # self-referencing, null or out-of-range base: stop
                    # following the chain (corruption guard)
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classification order matters: direct parents first, then
            # self-base, then the "skip" cases, then snapshots
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general delta, the base is either the revision itself
            # (full text) or the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  p1  p2      chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        # chains are numbered 1..N in order of first appearance of their base
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate how sparse-read would slice this chain into on-disk
            # read blocks, tracking total/maximum block size
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989
989
990
990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # one positional argument: it is REV and the storage comes from
    # -c/-m; two positional arguments: FILE then REV
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # delta computer configured to narrate its search on stdout
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=True,
    )

    # rebuild the revision metadata as if this revision were being added
    # afresh: full text, no cached delta
    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    btext = [revlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    # replay the delta search; output goes through write_debug above
    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048
1048
1049
1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 metadata ("docket") file instead of
    # the per-file entries; only dirstate-v2 has one.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # the deprecated --nodates flag, when given, overrides --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename); filename breaks mtime ties
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit of the stored st_mode
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # copy sources are listed after all the entries
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137
1137
1138
1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records an ignore-pattern hash (at the tail of the
    # docket's tree metadata); with dirstate-v1 there is nothing to print.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153
1153
1154
1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (or legacy local) peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository, restricted to the requested revisions,
        # as the "remote" side of the discovery
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # likewise, optionally restrict what the local side can see
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # tree-based discovery (pre-setdiscovery protocol)

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `_any` is unused; renamed from `any` to stop shadowing the
            # builtin of the same name.
            common, _any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # `all_revs` renamed from `all` to stop shadowing the builtin.
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1409
1409
1410
1410
# I/O buffer size (4 KiB) used by `hg debugdownload` below.
_chunksize = 4 << 10
1412
1412
1413
1413
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # Open the URL through Mercurial's own opener so hgrc-configured
    # proxies, authentication and schemes are honored.
    fh = urlmod.open(ui, url, output)

    # With --output we stream into that file; otherwise the bytes go
    # straight to the ui.
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        # Copy the payload in fixed-size chunks until exhausted.
        while True:
            chunk = fh.read(_chunksize)
            if not chunk:
                break
            dest.write(chunk)
    finally:
        # Only close what we opened ourselves (never the ui).
        if output:
            dest.close()
1436
1436
1437
1437
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized (PyOxidizer) builds embed
        # modules in the executable, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: name on its own line, details (if any) follow
            fm.write(b'name', b'%s\n', extname)
        else:
            # normal mode: annotate the name with a compatibility marker
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # tested, but not with this exact Mercurial version
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # remaining fields are only rendered in verbose mode, but are
        # always available to templated/structured output via the formatter
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1499
1499
1500
1500
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # processing pipeline: raw parse tree -> analyzed -> optimized
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages should be dumped to the user (--show-stage / -p)
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to run the matcher against
    files = set()
    if opts[b'all_files']:
        # every file ever touched by any revision (plus subrepo paths)
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory contents (unknown/ignored too)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        # only the files touched by the selected revision
        files.update(ctx.files())
        files.update(ctx.substate)

    # build the matcher for the fileset expression and print matches
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1596
1596
1597
1597
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Producing a report is exclusive with consuming one or dry-running.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only ever affected revlogv1 stores; bail out early otherwise.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # All of the actual detection/repair work lives in the rewrite module.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1670
1670
1671
1671
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)

    # Column width: the widest variant name, never narrower than the header.
    width = max(len(fv.name) for fv in upgrade.allformatvariant)
    width = max(width, len(b'format-variant'))

    def namefmt(name):
        # '%s:' followed by padding so every value column lines up.
        return b'%s:' + b' ' * (width - len(name))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # Plain output renders booleans as yes/no; byte strings (anything
        # with a startswith attribute) pass through untouched.
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # Header row.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (width - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for variant in upgrade.allformatvariant:
        fm.startitem()
        repovalue = variant.fromrepo(repo)
        configvalue = variant.fromconfig(repo)

        # Pick labels so the UI can highlight repo/config/default mismatches.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != variant.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(
            b'name', namefmt(variant.name), variant.name, label=namelabel
        )
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if variant.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns are only emitted in verbose mode.
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(variant.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1742
1742
1743
1743
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # Same rendering as the classic `x and b'yes' or b'no'` idiom.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probe case sensitivity with a scratch file; an unwritable or missing
    # directory leaves the answer unknown instead of aborting the command.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1766
1766
1767
1767
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # Build the getbundle() argument dict from the hex ids on the command
    # line; heads/common are only passed when explicitly given.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name to an on-disk bundle type.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1814
1814
1815
1815
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Without arguments, dump the combined ignore matcher itself.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise look for an ignored ancestor directory.
                for d in pathutil.finddirs(nf):
                    if ignore(d):
                        ignored = d
                        ignoredata = repo.dirstate._ignorefileandline(d)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1864
1864
1865
1865
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    # Resolve -c/-m/FILE to the backing storage object first, so bad
    # arguments abort before any output machinery is set up.
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)
    # Some storage objects wrap a revlog; unwrap when possible, otherwise
    # the store itself is the revlog.
    target = getattr(store, b'_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target,
        full_node=ui.debugflag,
    )
1887
1887
1888
1888
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        # First parent always gets an edge; second parent only when real.
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1907
1907
1908
1908
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Force the index to be exercised (and any native caches populated)
    # before asking it for statistics.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # Only the C/native index implementation exposes stats().
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1918
1918
1919
1919
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Count of detected problems; doubles as the command's exit status.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the standard library; under an oxidized (PyOxidizer) build
    # os.__file__ is absent, so fall back to the executable path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # Detect whether the Rust extensions are importable at all.
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same oxidized-build fallback as for the Python lib path above.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Import the compiled extension modules the policy promises, so a
        # broken install surfaces as a reported problem here.
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # p was cleared above if the template dir, file, or parse failed.
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # A missing default 'vi' is only a warning; a missing user-configured
    # editor counts as a problem (see below).
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Give loaded extensions a chance to run their own install checks;
    # each handler returns its own problem count.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2218
2218
2219
2219
2220 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2220 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2221 def debugknown(ui, repopath, *ids, **opts):
2221 def debugknown(ui, repopath, *ids, **opts):
2222 """test whether node ids are known to a repo
2222 """test whether node ids are known to a repo
2223
2223
2224 Every ID must be a full-length hex node id string. Returns a list of 0s
2224 Every ID must be a full-length hex node id string. Returns a list of 0s
2225 and 1s indicating unknown/known.
2225 and 1s indicating unknown/known.
2226 """
2226 """
2227 opts = pycompat.byteskwargs(opts)
2227 opts = pycompat.byteskwargs(opts)
2228 repo = hg.peer(ui, opts, repopath)
2228 repo = hg.peer(ui, opts, repopath)
2229 if not repo.capable(b'known'):
2229 if not repo.capable(b'known'):
2230 raise error.Abort(b"known() not supported by target repository")
2230 raise error.Abort(b"known() not supported by target repository")
2231 flags = repo.known([bin(s) for s in ids])
2231 flags = repo.known([bin(s) for s in ids])
2232 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2232 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2233
2233
2234
2234
2235 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2235 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2236 def debuglabelcomplete(ui, repo, *args):
2236 def debuglabelcomplete(ui, repo, *args):
2237 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2237 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2238 debugnamecomplete(ui, repo, *args)
2238 debugnamecomplete(ui, repo, *args)
2239
2239
2240
2240
2241 @command(
2241 @command(
2242 b'debuglocks',
2242 b'debuglocks',
2243 [
2243 [
2244 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2244 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2245 (
2245 (
2246 b'W',
2246 b'W',
2247 b'force-free-wlock',
2247 b'force-free-wlock',
2248 None,
2248 None,
2249 _(b'free the working state lock (DANGEROUS)'),
2249 _(b'free the working state lock (DANGEROUS)'),
2250 ),
2250 ),
2251 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2251 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2252 (
2252 (
2253 b'S',
2253 b'S',
2254 b'set-wlock',
2254 b'set-wlock',
2255 None,
2255 None,
2256 _(b'set the working state lock until stopped'),
2256 _(b'set the working state lock until stopped'),
2257 ),
2257 ),
2258 ],
2258 ],
2259 _(b'[OPTION]...'),
2259 _(b'[OPTION]...'),
2260 )
2260 )
2261 def debuglocks(ui, repo, **opts):
2261 def debuglocks(ui, repo, **opts):
2262 """show or modify state of locks
2262 """show or modify state of locks
2263
2263
2264 By default, this command will show which locks are held. This
2264 By default, this command will show which locks are held. This
2265 includes the user and process holding the lock, the amount of time
2265 includes the user and process holding the lock, the amount of time
2266 the lock has been held, and the machine name where the process is
2266 the lock has been held, and the machine name where the process is
2267 running if it's not local.
2267 running if it's not local.
2268
2268
2269 Locks protect the integrity of Mercurial's data, so should be
2269 Locks protect the integrity of Mercurial's data, so should be
2270 treated with care. System crashes or other interruptions may cause
2270 treated with care. System crashes or other interruptions may cause
2271 locks to not be properly released, though Mercurial will usually
2271 locks to not be properly released, though Mercurial will usually
2272 detect and remove such stale locks automatically.
2272 detect and remove such stale locks automatically.
2273
2273
2274 However, detecting stale locks may not always be possible (for
2274 However, detecting stale locks may not always be possible (for
2275 instance, on a shared filesystem). Removing locks may also be
2275 instance, on a shared filesystem). Removing locks may also be
2276 blocked by filesystem permissions.
2276 blocked by filesystem permissions.
2277
2277
2278 Setting a lock will prevent other commands from changing the data.
2278 Setting a lock will prevent other commands from changing the data.
2279 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2279 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2280 The set locks are removed when the command exits.
2280 The set locks are removed when the command exits.
2281
2281
2282 Returns 0 if no locks are held.
2282 Returns 0 if no locks are held.
2283
2283
2284 """
2284 """
2285
2285
2286 if opts.get('force_free_lock'):
2286 if opts.get('force_free_lock'):
2287 repo.svfs.tryunlink(b'lock')
2287 repo.svfs.tryunlink(b'lock')
2288 if opts.get('force_free_wlock'):
2288 if opts.get('force_free_wlock'):
2289 repo.vfs.tryunlink(b'wlock')
2289 repo.vfs.tryunlink(b'wlock')
2290 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2290 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2291 return 0
2291 return 0
2292
2292
2293 locks = []
2293 locks = []
2294 try:
2294 try:
2295 if opts.get('set_wlock'):
2295 if opts.get('set_wlock'):
2296 try:
2296 try:
2297 locks.append(repo.wlock(False))
2297 locks.append(repo.wlock(False))
2298 except error.LockHeld:
2298 except error.LockHeld:
2299 raise error.Abort(_(b'wlock is already held'))
2299 raise error.Abort(_(b'wlock is already held'))
2300 if opts.get('set_lock'):
2300 if opts.get('set_lock'):
2301 try:
2301 try:
2302 locks.append(repo.lock(False))
2302 locks.append(repo.lock(False))
2303 except error.LockHeld:
2303 except error.LockHeld:
2304 raise error.Abort(_(b'lock is already held'))
2304 raise error.Abort(_(b'lock is already held'))
2305 if len(locks):
2305 if len(locks):
2306 try:
2306 try:
2307 if ui.interactive():
2307 if ui.interactive():
2308 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2308 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2309 ui.promptchoice(prompt)
2309 ui.promptchoice(prompt)
2310 else:
2310 else:
2311 msg = b"%d locks held, waiting for signal\n"
2311 msg = b"%d locks held, waiting for signal\n"
2312 msg %= len(locks)
2312 msg %= len(locks)
2313 ui.status(msg)
2313 ui.status(msg)
2314 while True: # XXX wait for a signal
2314 while True: # XXX wait for a signal
2315 time.sleep(0.1)
2315 time.sleep(0.1)
2316 except KeyboardInterrupt:
2316 except KeyboardInterrupt:
2317 msg = b"signal-received releasing locks\n"
2317 msg = b"signal-received releasing locks\n"
2318 ui.status(msg)
2318 ui.status(msg)
2319 return 0
2319 return 0
2320 finally:
2320 finally:
2321 release(*locks)
2321 release(*locks)
2322
2322
2323 now = time.time()
2323 now = time.time()
2324 held = 0
2324 held = 0
2325
2325
2326 def report(vfs, name, method):
2326 def report(vfs, name, method):
2327 # this causes stale locks to get reaped for more accurate reporting
2327 # this causes stale locks to get reaped for more accurate reporting
2328 try:
2328 try:
2329 l = method(False)
2329 l = method(False)
2330 except error.LockHeld:
2330 except error.LockHeld:
2331 l = None
2331 l = None
2332
2332
2333 if l:
2333 if l:
2334 l.release()
2334 l.release()
2335 else:
2335 else:
2336 try:
2336 try:
2337 st = vfs.lstat(name)
2337 st = vfs.lstat(name)
2338 age = now - st[stat.ST_MTIME]
2338 age = now - st[stat.ST_MTIME]
2339 user = util.username(st.st_uid)
2339 user = util.username(st.st_uid)
2340 locker = vfs.readlock(name)
2340 locker = vfs.readlock(name)
2341 if b":" in locker:
2341 if b":" in locker:
2342 host, pid = locker.split(b':')
2342 host, pid = locker.split(b':')
2343 if host == socket.gethostname():
2343 if host == socket.gethostname():
2344 locker = b'user %s, process %s' % (user or b'None', pid)
2344 locker = b'user %s, process %s' % (user or b'None', pid)
2345 else:
2345 else:
2346 locker = b'user %s, process %s, host %s' % (
2346 locker = b'user %s, process %s, host %s' % (
2347 user or b'None',
2347 user or b'None',
2348 pid,
2348 pid,
2349 host,
2349 host,
2350 )
2350 )
2351 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2351 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2352 return 1
2352 return 1
2353 except FileNotFoundError:
2353 except FileNotFoundError:
2354 pass
2354 pass
2355
2355
2356 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2356 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2357 return 0
2357 return 0
2358
2358
2359 held += report(repo.svfs, b"lock", repo.lock)
2359 held += report(repo.svfs, b"lock", repo.lock)
2360 held += report(repo.vfs, b"wlock", repo.wlock)
2360 held += report(repo.vfs, b"wlock", repo.wlock)
2361
2361
2362 return held
2362 return held
2363
2363
2364
2364
2365 @command(
2365 @command(
2366 b'debugmanifestfulltextcache',
2366 b'debugmanifestfulltextcache',
2367 [
2367 [
2368 (b'', b'clear', False, _(b'clear the cache')),
2368 (b'', b'clear', False, _(b'clear the cache')),
2369 (
2369 (
2370 b'a',
2370 b'a',
2371 b'add',
2371 b'add',
2372 [],
2372 [],
2373 _(b'add the given manifest nodes to the cache'),
2373 _(b'add the given manifest nodes to the cache'),
2374 _(b'NODE'),
2374 _(b'NODE'),
2375 ),
2375 ),
2376 ],
2376 ],
2377 b'',
2377 b'',
2378 )
2378 )
2379 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2379 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2380 """show, clear or amend the contents of the manifest fulltext cache"""
2380 """show, clear or amend the contents of the manifest fulltext cache"""
2381
2381
2382 def getcache():
2382 def getcache():
2383 r = repo.manifestlog.getstorage(b'')
2383 r = repo.manifestlog.getstorage(b'')
2384 try:
2384 try:
2385 return r._fulltextcache
2385 return r._fulltextcache
2386 except AttributeError:
2386 except AttributeError:
2387 msg = _(
2387 msg = _(
2388 b"Current revlog implementation doesn't appear to have a "
2388 b"Current revlog implementation doesn't appear to have a "
2389 b"manifest fulltext cache\n"
2389 b"manifest fulltext cache\n"
2390 )
2390 )
2391 raise error.Abort(msg)
2391 raise error.Abort(msg)
2392
2392
2393 if opts.get('clear'):
2393 if opts.get('clear'):
2394 with repo.wlock():
2394 with repo.wlock():
2395 cache = getcache()
2395 cache = getcache()
2396 cache.clear(clear_persisted_data=True)
2396 cache.clear(clear_persisted_data=True)
2397 return
2397 return
2398
2398
2399 if add:
2399 if add:
2400 with repo.wlock():
2400 with repo.wlock():
2401 m = repo.manifestlog
2401 m = repo.manifestlog
2402 store = m.getstorage(b'')
2402 store = m.getstorage(b'')
2403 for n in add:
2403 for n in add:
2404 try:
2404 try:
2405 manifest = m[store.lookup(n)]
2405 manifest = m[store.lookup(n)]
2406 except error.LookupError as e:
2406 except error.LookupError as e:
2407 raise error.Abort(
2407 raise error.Abort(
2408 bytes(e), hint=b"Check your manifest node id"
2408 bytes(e), hint=b"Check your manifest node id"
2409 )
2409 )
2410 manifest.read() # stores revisision in cache too
2410 manifest.read() # stores revisision in cache too
2411 return
2411 return
2412
2412
2413 cache = getcache()
2413 cache = getcache()
2414 if not len(cache):
2414 if not len(cache):
2415 ui.write(_(b'cache empty\n'))
2415 ui.write(_(b'cache empty\n'))
2416 else:
2416 else:
2417 ui.write(
2417 ui.write(
2418 _(
2418 _(
2419 b'cache contains %d manifest entries, in order of most to '
2419 b'cache contains %d manifest entries, in order of most to '
2420 b'least recent:\n'
2420 b'least recent:\n'
2421 )
2421 )
2422 % (len(cache),)
2422 % (len(cache),)
2423 )
2423 )
2424 totalsize = 0
2424 totalsize = 0
2425 for nodeid in cache:
2425 for nodeid in cache:
2426 # Use cache.get to not update the LRU order
2426 # Use cache.get to not update the LRU order
2427 data = cache.peek(nodeid)
2427 data = cache.peek(nodeid)
2428 size = len(data)
2428 size = len(data)
2429 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2429 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2430 ui.write(
2430 ui.write(
2431 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2431 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2432 )
2432 )
2433 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2433 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2434 ui.write(
2434 ui.write(
2435 _(b'total cache data size %s, on-disk %s\n')
2435 _(b'total cache data size %s, on-disk %s\n')
2436 % (util.bytecount(totalsize), util.bytecount(ondisk))
2436 % (util.bytecount(totalsize), util.bytecount(ondisk))
2437 )
2437 )
2438
2438
2439
2439
2440 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2440 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2441 def debugmergestate(ui, repo, *args, **opts):
2441 def debugmergestate(ui, repo, *args, **opts):
2442 """print merge state
2442 """print merge state
2443
2443
2444 Use --verbose to print out information about whether v1 or v2 merge state
2444 Use --verbose to print out information about whether v1 or v2 merge state
2445 was chosen."""
2445 was chosen."""
2446
2446
2447 if ui.verbose:
2447 if ui.verbose:
2448 ms = mergestatemod.mergestate(repo)
2448 ms = mergestatemod.mergestate(repo)
2449
2449
2450 # sort so that reasonable information is on top
2450 # sort so that reasonable information is on top
2451 v1records = ms._readrecordsv1()
2451 v1records = ms._readrecordsv1()
2452 v2records = ms._readrecordsv2()
2452 v2records = ms._readrecordsv2()
2453
2453
2454 if not v1records and not v2records:
2454 if not v1records and not v2records:
2455 pass
2455 pass
2456 elif not v2records:
2456 elif not v2records:
2457 ui.writenoi18n(b'no version 2 merge state\n')
2457 ui.writenoi18n(b'no version 2 merge state\n')
2458 elif ms._v1v2match(v1records, v2records):
2458 elif ms._v1v2match(v1records, v2records):
2459 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2459 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2460 else:
2460 else:
2461 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2461 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2462
2462
2463 opts = pycompat.byteskwargs(opts)
2463 opts = pycompat.byteskwargs(opts)
2464 if not opts[b'template']:
2464 if not opts[b'template']:
2465 opts[b'template'] = (
2465 opts[b'template'] = (
2466 b'{if(commits, "", "no merge state found\n")}'
2466 b'{if(commits, "", "no merge state found\n")}'
2467 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2467 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2468 b'{files % "file: {path} (state \\"{state}\\")\n'
2468 b'{files % "file: {path} (state \\"{state}\\")\n'
2469 b'{if(local_path, "'
2469 b'{if(local_path, "'
2470 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2470 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2471 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2471 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2472 b' other path: {other_path} (node {other_node})\n'
2472 b' other path: {other_path} (node {other_node})\n'
2473 b'")}'
2473 b'")}'
2474 b'{if(rename_side, "'
2474 b'{if(rename_side, "'
2475 b' rename side: {rename_side}\n'
2475 b' rename side: {rename_side}\n'
2476 b' renamed path: {renamed_path}\n'
2476 b' renamed path: {renamed_path}\n'
2477 b'")}'
2477 b'")}'
2478 b'{extras % " extra: {key} = {value}\n"}'
2478 b'{extras % " extra: {key} = {value}\n"}'
2479 b'"}'
2479 b'"}'
2480 b'{extras % "extra: {file} ({key} = {value})\n"}'
2480 b'{extras % "extra: {file} ({key} = {value})\n"}'
2481 )
2481 )
2482
2482
2483 ms = mergestatemod.mergestate.read(repo)
2483 ms = mergestatemod.mergestate.read(repo)
2484
2484
2485 fm = ui.formatter(b'debugmergestate', opts)
2485 fm = ui.formatter(b'debugmergestate', opts)
2486 fm.startitem()
2486 fm.startitem()
2487
2487
2488 fm_commits = fm.nested(b'commits')
2488 fm_commits = fm.nested(b'commits')
2489 if ms.active():
2489 if ms.active():
2490 for name, node, label_index in (
2490 for name, node, label_index in (
2491 (b'local', ms.local, 0),
2491 (b'local', ms.local, 0),
2492 (b'other', ms.other, 1),
2492 (b'other', ms.other, 1),
2493 ):
2493 ):
2494 fm_commits.startitem()
2494 fm_commits.startitem()
2495 fm_commits.data(name=name)
2495 fm_commits.data(name=name)
2496 fm_commits.data(node=hex(node))
2496 fm_commits.data(node=hex(node))
2497 if ms._labels and len(ms._labels) > label_index:
2497 if ms._labels and len(ms._labels) > label_index:
2498 fm_commits.data(label=ms._labels[label_index])
2498 fm_commits.data(label=ms._labels[label_index])
2499 fm_commits.end()
2499 fm_commits.end()
2500
2500
2501 fm_files = fm.nested(b'files')
2501 fm_files = fm.nested(b'files')
2502 if ms.active():
2502 if ms.active():
2503 for f in ms:
2503 for f in ms:
2504 fm_files.startitem()
2504 fm_files.startitem()
2505 fm_files.data(path=f)
2505 fm_files.data(path=f)
2506 state = ms._state[f]
2506 state = ms._state[f]
2507 fm_files.data(state=state[0])
2507 fm_files.data(state=state[0])
2508 if state[0] in (
2508 if state[0] in (
2509 mergestatemod.MERGE_RECORD_UNRESOLVED,
2509 mergestatemod.MERGE_RECORD_UNRESOLVED,
2510 mergestatemod.MERGE_RECORD_RESOLVED,
2510 mergestatemod.MERGE_RECORD_RESOLVED,
2511 ):
2511 ):
2512 fm_files.data(local_key=state[1])
2512 fm_files.data(local_key=state[1])
2513 fm_files.data(local_path=state[2])
2513 fm_files.data(local_path=state[2])
2514 fm_files.data(ancestor_path=state[3])
2514 fm_files.data(ancestor_path=state[3])
2515 fm_files.data(ancestor_node=state[4])
2515 fm_files.data(ancestor_node=state[4])
2516 fm_files.data(other_path=state[5])
2516 fm_files.data(other_path=state[5])
2517 fm_files.data(other_node=state[6])
2517 fm_files.data(other_node=state[6])
2518 fm_files.data(local_flags=state[7])
2518 fm_files.data(local_flags=state[7])
2519 elif state[0] in (
2519 elif state[0] in (
2520 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2520 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2521 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2521 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2522 ):
2522 ):
2523 fm_files.data(renamed_path=state[1])
2523 fm_files.data(renamed_path=state[1])
2524 fm_files.data(rename_side=state[2])
2524 fm_files.data(rename_side=state[2])
2525 fm_extras = fm_files.nested(b'extras')
2525 fm_extras = fm_files.nested(b'extras')
2526 for k, v in sorted(ms.extras(f).items()):
2526 for k, v in sorted(ms.extras(f).items()):
2527 fm_extras.startitem()
2527 fm_extras.startitem()
2528 fm_extras.data(key=k)
2528 fm_extras.data(key=k)
2529 fm_extras.data(value=v)
2529 fm_extras.data(value=v)
2530 fm_extras.end()
2530 fm_extras.end()
2531
2531
2532 fm_files.end()
2532 fm_files.end()
2533
2533
2534 fm_extras = fm.nested(b'extras')
2534 fm_extras = fm.nested(b'extras')
2535 for f, d in sorted(ms.allextras().items()):
2535 for f, d in sorted(ms.allextras().items()):
2536 if f in ms:
2536 if f in ms:
2537 # If file is in mergestate, we have already processed it's extras
2537 # If file is in mergestate, we have already processed it's extras
2538 continue
2538 continue
2539 for k, v in d.items():
2539 for k, v in d.items():
2540 fm_extras.startitem()
2540 fm_extras.startitem()
2541 fm_extras.data(file=f)
2541 fm_extras.data(file=f)
2542 fm_extras.data(key=k)
2542 fm_extras.data(key=k)
2543 fm_extras.data(value=v)
2543 fm_extras.data(value=v)
2544 fm_extras.end()
2544 fm_extras.end()
2545
2545
2546 fm.end()
2546 fm.end()
2547
2547
2548
2548
2549 @command(b'debugnamecomplete', [], _(b'NAME...'))
2549 @command(b'debugnamecomplete', [], _(b'NAME...'))
2550 def debugnamecomplete(ui, repo, *args):
2550 def debugnamecomplete(ui, repo, *args):
2551 '''complete "names" - tags, open branch names, bookmark names'''
2551 '''complete "names" - tags, open branch names, bookmark names'''
2552
2552
2553 names = set()
2553 names = set()
2554 # since we previously only listed open branches, we will handle that
2554 # since we previously only listed open branches, we will handle that
2555 # specially (after this for loop)
2555 # specially (after this for loop)
2556 for name, ns in repo.names.items():
2556 for name, ns in repo.names.items():
2557 if name != b'branches':
2557 if name != b'branches':
2558 names.update(ns.listnames(repo))
2558 names.update(ns.listnames(repo))
2559 names.update(
2559 names.update(
2560 tag
2560 tag
2561 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2561 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2562 if not closed
2562 if not closed
2563 )
2563 )
2564 completions = set()
2564 completions = set()
2565 if not args:
2565 if not args:
2566 args = [b'']
2566 args = [b'']
2567 for a in args:
2567 for a in args:
2568 completions.update(n for n in names if n.startswith(a))
2568 completions.update(n for n in names if n.startswith(a))
2569 ui.write(b'\n'.join(sorted(completions)))
2569 ui.write(b'\n'.join(sorted(completions)))
2570 ui.write(b'\n')
2570 ui.write(b'\n')
2571
2571
2572
2572
2573 @command(
2573 @command(
2574 b'debugnodemap',
2574 b'debugnodemap',
2575 [
2575 [
2576 (
2576 (
2577 b'',
2577 b'',
2578 b'dump-new',
2578 b'dump-new',
2579 False,
2579 False,
2580 _(b'write a (new) persistent binary nodemap on stdout'),
2580 _(b'write a (new) persistent binary nodemap on stdout'),
2581 ),
2581 ),
2582 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2582 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2583 (
2583 (
2584 b'',
2584 b'',
2585 b'check',
2585 b'check',
2586 False,
2586 False,
2587 _(b'check that the data on disk data are correct.'),
2587 _(b'check that the data on disk data are correct.'),
2588 ),
2588 ),
2589 (
2589 (
2590 b'',
2590 b'',
2591 b'metadata',
2591 b'metadata',
2592 False,
2592 False,
2593 _(b'display the on disk meta data for the nodemap'),
2593 _(b'display the on disk meta data for the nodemap'),
2594 ),
2594 ),
2595 ],
2595 ],
2596 )
2596 )
2597 def debugnodemap(ui, repo, **opts):
2597 def debugnodemap(ui, repo, **opts):
2598 """write and inspect on disk nodemap"""
2598 """write and inspect on disk nodemap"""
2599 if opts['dump_new']:
2599 if opts['dump_new']:
2600 unfi = repo.unfiltered()
2600 unfi = repo.unfiltered()
2601 cl = unfi.changelog
2601 cl = unfi.changelog
2602 if util.safehasattr(cl.index, "nodemap_data_all"):
2602 if util.safehasattr(cl.index, "nodemap_data_all"):
2603 data = cl.index.nodemap_data_all()
2603 data = cl.index.nodemap_data_all()
2604 else:
2604 else:
2605 data = nodemap.persistent_data(cl.index)
2605 data = nodemap.persistent_data(cl.index)
2606 ui.write(data)
2606 ui.write(data)
2607 elif opts['dump_disk']:
2607 elif opts['dump_disk']:
2608 unfi = repo.unfiltered()
2608 unfi = repo.unfiltered()
2609 cl = unfi.changelog
2609 cl = unfi.changelog
2610 nm_data = nodemap.persisted_data(cl)
2610 nm_data = nodemap.persisted_data(cl)
2611 if nm_data is not None:
2611 if nm_data is not None:
2612 docket, data = nm_data
2612 docket, data = nm_data
2613 ui.write(data[:])
2613 ui.write(data[:])
2614 elif opts['check']:
2614 elif opts['check']:
2615 unfi = repo.unfiltered()
2615 unfi = repo.unfiltered()
2616 cl = unfi.changelog
2616 cl = unfi.changelog
2617 nm_data = nodemap.persisted_data(cl)
2617 nm_data = nodemap.persisted_data(cl)
2618 if nm_data is not None:
2618 if nm_data is not None:
2619 docket, data = nm_data
2619 docket, data = nm_data
2620 return nodemap.check_data(ui, cl.index, data)
2620 return nodemap.check_data(ui, cl.index, data)
2621 elif opts['metadata']:
2621 elif opts['metadata']:
2622 unfi = repo.unfiltered()
2622 unfi = repo.unfiltered()
2623 cl = unfi.changelog
2623 cl = unfi.changelog
2624 nm_data = nodemap.persisted_data(cl)
2624 nm_data = nodemap.persisted_data(cl)
2625 if nm_data is not None:
2625 if nm_data is not None:
2626 docket, data = nm_data
2626 docket, data = nm_data
2627 ui.write((b"uid: %s\n") % docket.uid)
2627 ui.write((b"uid: %s\n") % docket.uid)
2628 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2628 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2629 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2629 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2630 ui.write((b"data-length: %d\n") % docket.data_length)
2630 ui.write((b"data-length: %d\n") % docket.data_length)
2631 ui.write((b"data-unused: %d\n") % docket.data_unused)
2631 ui.write((b"data-unused: %d\n") % docket.data_unused)
2632 unused_perc = docket.data_unused * 100.0 / docket.data_length
2632 unused_perc = docket.data_unused * 100.0 / docket.data_length
2633 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2633 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2634
2634
2635
2635
2636 @command(
2636 @command(
2637 b'debugobsolete',
2637 b'debugobsolete',
2638 [
2638 [
2639 (b'', b'flags', 0, _(b'markers flag')),
2639 (b'', b'flags', 0, _(b'markers flag')),
2640 (
2640 (
2641 b'',
2641 b'',
2642 b'record-parents',
2642 b'record-parents',
2643 False,
2643 False,
2644 _(b'record parent information for the precursor'),
2644 _(b'record parent information for the precursor'),
2645 ),
2645 ),
2646 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2646 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2647 (
2647 (
2648 b'',
2648 b'',
2649 b'exclusive',
2649 b'exclusive',
2650 False,
2650 False,
2651 _(b'restrict display to markers only relevant to REV'),
2651 _(b'restrict display to markers only relevant to REV'),
2652 ),
2652 ),
2653 (b'', b'index', False, _(b'display index of the marker')),
2653 (b'', b'index', False, _(b'display index of the marker')),
2654 (b'', b'delete', [], _(b'delete markers specified by indices')),
2654 (b'', b'delete', [], _(b'delete markers specified by indices')),
2655 ]
2655 ]
2656 + cmdutil.commitopts2
2656 + cmdutil.commitopts2
2657 + cmdutil.formatteropts,
2657 + cmdutil.formatteropts,
2658 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2658 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2659 )
2659 )
2660 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2660 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2661 """create arbitrary obsolete marker
2661 """create arbitrary obsolete marker
2662
2662
2663 With no arguments, displays the list of obsolescence markers."""
2663 With no arguments, displays the list of obsolescence markers."""
2664
2664
2665 opts = pycompat.byteskwargs(opts)
2665 opts = pycompat.byteskwargs(opts)
2666
2666
2667 def parsenodeid(s):
2667 def parsenodeid(s):
2668 try:
2668 try:
2669 # We do not use revsingle/revrange functions here to accept
2669 # We do not use revsingle/revrange functions here to accept
2670 # arbitrary node identifiers, possibly not present in the
2670 # arbitrary node identifiers, possibly not present in the
2671 # local repository.
2671 # local repository.
2672 n = bin(s)
2672 n = bin(s)
2673 if len(n) != repo.nodeconstants.nodelen:
2673 if len(n) != repo.nodeconstants.nodelen:
2674 raise ValueError
2674 raise ValueError
2675 return n
2675 return n
2676 except ValueError:
2676 except ValueError:
2677 raise error.InputError(
2677 raise error.InputError(
2678 b'changeset references must be full hexadecimal '
2678 b'changeset references must be full hexadecimal '
2679 b'node identifiers'
2679 b'node identifiers'
2680 )
2680 )
2681
2681
2682 if opts.get(b'delete'):
2682 if opts.get(b'delete'):
2683 indices = []
2683 indices = []
2684 for v in opts.get(b'delete'):
2684 for v in opts.get(b'delete'):
2685 try:
2685 try:
2686 indices.append(int(v))
2686 indices.append(int(v))
2687 except ValueError:
2687 except ValueError:
2688 raise error.InputError(
2688 raise error.InputError(
2689 _(b'invalid index value: %r') % v,
2689 _(b'invalid index value: %r') % v,
2690 hint=_(b'use integers for indices'),
2690 hint=_(b'use integers for indices'),
2691 )
2691 )
2692
2692
2693 if repo.currenttransaction():
2693 if repo.currenttransaction():
2694 raise error.Abort(
2694 raise error.Abort(
2695 _(b'cannot delete obsmarkers in the middle of transaction.')
2695 _(b'cannot delete obsmarkers in the middle of transaction.')
2696 )
2696 )
2697
2697
2698 with repo.lock():
2698 with repo.lock():
2699 n = repair.deleteobsmarkers(repo.obsstore, indices)
2699 n = repair.deleteobsmarkers(repo.obsstore, indices)
2700 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2700 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2701
2701
2702 return
2702 return
2703
2703
2704 if precursor is not None:
2704 if precursor is not None:
2705 if opts[b'rev']:
2705 if opts[b'rev']:
2706 raise error.InputError(
2706 raise error.InputError(
2707 b'cannot select revision when creating marker'
2707 b'cannot select revision when creating marker'
2708 )
2708 )
2709 metadata = {}
2709 metadata = {}
2710 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2710 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2711 succs = tuple(parsenodeid(succ) for succ in successors)
2711 succs = tuple(parsenodeid(succ) for succ in successors)
2712 l = repo.lock()
2712 l = repo.lock()
2713 try:
2713 try:
2714 tr = repo.transaction(b'debugobsolete')
2714 tr = repo.transaction(b'debugobsolete')
2715 try:
2715 try:
2716 date = opts.get(b'date')
2716 date = opts.get(b'date')
2717 if date:
2717 if date:
2718 date = dateutil.parsedate(date)
2718 date = dateutil.parsedate(date)
2719 else:
2719 else:
2720 date = None
2720 date = None
2721 prec = parsenodeid(precursor)
2721 prec = parsenodeid(precursor)
2722 parents = None
2722 parents = None
2723 if opts[b'record_parents']:
2723 if opts[b'record_parents']:
2724 if prec not in repo.unfiltered():
2724 if prec not in repo.unfiltered():
2725 raise error.Abort(
2725 raise error.Abort(
2726 b'cannot used --record-parents on '
2726 b'cannot used --record-parents on '
2727 b'unknown changesets'
2727 b'unknown changesets'
2728 )
2728 )
2729 parents = repo.unfiltered()[prec].parents()
2729 parents = repo.unfiltered()[prec].parents()
2730 parents = tuple(p.node() for p in parents)
2730 parents = tuple(p.node() for p in parents)
2731 repo.obsstore.create(
2731 repo.obsstore.create(
2732 tr,
2732 tr,
2733 prec,
2733 prec,
2734 succs,
2734 succs,
2735 opts[b'flags'],
2735 opts[b'flags'],
2736 parents=parents,
2736 parents=parents,
2737 date=date,
2737 date=date,
2738 metadata=metadata,
2738 metadata=metadata,
2739 ui=ui,
2739 ui=ui,
2740 )
2740 )
2741 tr.close()
2741 tr.close()
2742 except ValueError as exc:
2742 except ValueError as exc:
2743 raise error.Abort(
2743 raise error.Abort(
2744 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2744 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2745 )
2745 )
2746 finally:
2746 finally:
2747 tr.release()
2747 tr.release()
2748 finally:
2748 finally:
2749 l.release()
2749 l.release()
2750 else:
2750 else:
2751 if opts[b'rev']:
2751 if opts[b'rev']:
2752 revs = logcmdutil.revrange(repo, opts[b'rev'])
2752 revs = logcmdutil.revrange(repo, opts[b'rev'])
2753 nodes = [repo[r].node() for r in revs]
2753 nodes = [repo[r].node() for r in revs]
2754 markers = list(
2754 markers = list(
2755 obsutil.getmarkers(
2755 obsutil.getmarkers(
2756 repo, nodes=nodes, exclusive=opts[b'exclusive']
2756 repo, nodes=nodes, exclusive=opts[b'exclusive']
2757 )
2757 )
2758 )
2758 )
2759 markers.sort(key=lambda x: x._data)
2759 markers.sort(key=lambda x: x._data)
2760 else:
2760 else:
2761 markers = obsutil.getmarkers(repo)
2761 markers = obsutil.getmarkers(repo)
2762
2762
2763 markerstoiter = markers
2763 markerstoiter = markers
2764 isrelevant = lambda m: True
2764 isrelevant = lambda m: True
2765 if opts.get(b'rev') and opts.get(b'index'):
2765 if opts.get(b'rev') and opts.get(b'index'):
2766 markerstoiter = obsutil.getmarkers(repo)
2766 markerstoiter = obsutil.getmarkers(repo)
2767 markerset = set(markers)
2767 markerset = set(markers)
2768 isrelevant = lambda m: m in markerset
2768 isrelevant = lambda m: m in markerset
2769
2769
2770 fm = ui.formatter(b'debugobsolete', opts)
2770 fm = ui.formatter(b'debugobsolete', opts)
2771 for i, m in enumerate(markerstoiter):
2771 for i, m in enumerate(markerstoiter):
2772 if not isrelevant(m):
2772 if not isrelevant(m):
2773 # marker can be irrelevant when we're iterating over a set
2773 # marker can be irrelevant when we're iterating over a set
2774 # of markers (markerstoiter) which is bigger than the set
2774 # of markers (markerstoiter) which is bigger than the set
2775 # of markers we want to display (markers)
2775 # of markers we want to display (markers)
2776 # this can happen if both --index and --rev options are
2776 # this can happen if both --index and --rev options are
2777 # provided and thus we need to iterate over all of the markers
2777 # provided and thus we need to iterate over all of the markers
2778 # to get the correct indices, but only display the ones that
2778 # to get the correct indices, but only display the ones that
2779 # are relevant to --rev value
2779 # are relevant to --rev value
2780 continue
2780 continue
2781 fm.startitem()
2781 fm.startitem()
2782 ind = i if opts.get(b'index') else None
2782 ind = i if opts.get(b'index') else None
2783 cmdutil.showmarker(fm, m, index=ind)
2783 cmdutil.showmarker(fm, m, index=ind)
2784 fm.end()
2784 fm.end()
2785
2785
2786
2786
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Normalize the str keyword keys from **opts to the bytes keys used
    # throughout Mercurial.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One line per copy record, rendered as "source -> destination".
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2799
2799
2800
2800
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Normalize the str keyword keys from **opts to the bytes keys used
    # throughout Mercurial.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One line per copy record, rendered as "source -> destination".
    copymap = ctx.p2copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2813
2813
2814
2814
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for `path`, keeping
        # only dirstate entries whose state letter appears in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec points outside this repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec root-relative with '/' separators, matching how the
        # dirstate stores paths.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the add methods once; this loop runs over the whole dirstate.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    # Convert back to the OS separator for display.
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path segment: a match that
                # continues past a separator is reported as a directory.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Accumulate the acceptable dirstate states from the flags; an empty
    # selection falls back to all states (b'nmar') at the call site below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Files and directories are printed together, one candidate per line.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2883
2883
2884
2884
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints of the comparison.
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # Restrict copy tracing to the requested file patterns.
    matcher = scmutil.match(ctx1, pats, opts)
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    # Sort by (destination, source) so the output is stable.
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2898
2898
2899
2899
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""

    def boolname(flag):
        # Localized yes/no rendering shared by the output lines below.
        return _(b'yes') if flag else _(b'no')

    # Always enable peer request logging; it is only displayed with --debug.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # Probe the peer before writing anything, keeping the original
        # order of peer interactions.
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % boolname(islocal))
        ui.write(_(b'pushable: %s\n') % boolname(pushable))
    finally:
        peer.close()
2923
2923
2924
2924
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # Convert the str keys from **opts to the bytes keys used internally.
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes precedence over every other configuration source,
        # implemented by forcing ui.forcemerge for the duration of the run.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, report the high-priority sources that are set (items 1,
        # 2 and 4 of the order documented above).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Unless --debug is in effect, silence the output _picktool
            # emits while probing the configuration; only the final
            # "FILE = MERGETOOL" line is wanted.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3009
3009
3010
3010
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return
        # Update mode: replace `old` with `new` for `key` via the wire
        # protocol's pushkey command.
        key, old, new = keyinfo
        args = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with target.commandexecutor() as executor:
            result = executor.callcommand(b'pushkey', args).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # Exit status 0 on success (truthy result), 1 on failure.
        return not result
    finally:
        target.close()
3046
3046
3047
3047
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvec (parent vector) encodings of two revisions and print
    # their depths, hamming distance, distance, and inferred relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the comparisons above holds, `rel` stays
    # unbound and the final write raises UnboundLocalError — presumably the
    # pvec operators (==, >, <, |) cover all cases; confirm against the
    # pvec module before relying on this.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3074
3074
3075
3075
3076 @command(
3076 @command(
3077 b'debugrebuilddirstate|debugrebuildstate',
3077 b'debugrebuilddirstate|debugrebuildstate',
3078 [
3078 [
3079 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3079 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3080 (
3080 (
3081 b'',
3081 b'',
3082 b'minimal',
3082 b'minimal',
3083 None,
3083 None,
3084 _(
3084 _(
3085 b'only rebuild files that are inconsistent with '
3085 b'only rebuild files that are inconsistent with '
3086 b'the working copy parent'
3086 b'the working copy parent'
3087 ),
3087 ),
3088 ),
3088 ),
3089 ],
3089 ],
3090 _(b'[-r REV]'),
3090 _(b'[-r REV]'),
3091 )
3091 )
3092 def debugrebuilddirstate(ui, repo, rev, **opts):
3092 def debugrebuilddirstate(ui, repo, rev, **opts):
3093 """rebuild the dirstate as it would look like for the given revision
3093 """rebuild the dirstate as it would look like for the given revision
3094
3094
3095 If no revision is specified the first current parent will be used.
3095 If no revision is specified the first current parent will be used.
3096
3096
3097 The dirstate will be set to the files of the given revision.
3097 The dirstate will be set to the files of the given revision.
3098 The actual working directory content or existing dirstate
3098 The actual working directory content or existing dirstate
3099 information such as adds or removes is not considered.
3099 information such as adds or removes is not considered.
3100
3100
3101 ``minimal`` will only rebuild the dirstate status for files that claim to be
3101 ``minimal`` will only rebuild the dirstate status for files that claim to be
3102 tracked but are not in the parent manifest, or that exist in the parent
3102 tracked but are not in the parent manifest, or that exist in the parent
3103 manifest but are not in the dirstate. It will not change adds, removes, or
3103 manifest but are not in the dirstate. It will not change adds, removes, or
3104 modified files that are in the working copy parent.
3104 modified files that are in the working copy parent.
3105
3105
3106 One use of this command is to make the next :hg:`status` invocation
3106 One use of this command is to make the next :hg:`status` invocation
3107 check the actual file content.
3107 check the actual file content.
3108 """
3108 """
3109 ctx = scmutil.revsingle(repo, rev)
3109 ctx = scmutil.revsingle(repo, rev)
3110 with repo.wlock():
3110 with repo.wlock():
3111 dirstate = repo.dirstate
3111 dirstate = repo.dirstate
3112 changedfiles = None
3112 changedfiles = None
3113 # See command doc for what minimal does.
3113 # See command doc for what minimal does.
3114 if opts.get('minimal'):
3114 if opts.get('minimal'):
3115 manifestfiles = set(ctx.manifest().keys())
3115 manifestfiles = set(ctx.manifest().keys())
3116 dirstatefiles = set(dirstate)
3116 dirstatefiles = set(dirstate)
3117 manifestonly = manifestfiles - dirstatefiles
3117 manifestonly = manifestfiles - dirstatefiles
3118 dsonly = dirstatefiles - manifestfiles
3118 dsonly = dirstatefiles - manifestfiles
3119 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3119 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3120 changedfiles = manifestonly | dsnotadded
3120 changedfiles = manifestonly | dsnotadded
3121
3121
3122 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3122 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3123
3123
3124
3124
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # The CLI flag `--only-data` arrives as the `only_data` keyword.
    only_data = opts.get("only_data")
    repair.rebuildfncache(ui, repo, only_data)
3141
3141
3142
3142
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Normalize the str keyword keys from **opts to bytes keys.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        filectx = ctx[path]
        # renamed() yields (old path, old filenode) or a falsy value.
        renameinfo = filectx.filelog().renamed(filectx.filenode())
        display = repo.pathto(path)
        if not renameinfo:
            ui.write(_(b"%s not renamed\n") % display)
            continue
        oldpath, oldnode = renameinfo
        ui.write(
            _(b"%s renamed from %s:%s\n") % (display, oldpath, hex(oldnode))
        )
3162
3162
3163
3163
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Emit the requirement names sorted, one per line.
    output = b''.join(b"%s\n" % req for req in sorted(repo.requirements))
    ui.write(output)
3169
3169
3170
3170
3171 @command(
3171 @command(
3172 b'debugrevlog',
3172 b'debugrevlog',
3173 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3173 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3174 _(b'-c|-m|FILE'),
3174 _(b'-c|-m|FILE'),
3175 optionalrepo=True,
3175 optionalrepo=True,
3176 )
3176 )
3177 def debugrevlog(ui, repo, file_=None, **opts):
3177 def debugrevlog(ui, repo, file_=None, **opts):
3178 """show data and statistics about a revlog"""
3178 """show data and statistics about a revlog"""
3179 opts = pycompat.byteskwargs(opts)
3179 opts = pycompat.byteskwargs(opts)
3180 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3180 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3181
3181
3182 if opts.get(b"dump"):
3182 if opts.get(b"dump"):
3183 numrevs = len(r)
3183 numrevs = len(r)
3184 ui.write(
3184 ui.write(
3185 (
3185 (
3186 b"# rev p1rev p2rev start end deltastart base p1 p2"
3186 b"# rev p1rev p2rev start end deltastart base p1 p2"
3187 b" rawsize totalsize compression heads chainlen\n"
3187 b" rawsize totalsize compression heads chainlen\n"
3188 )
3188 )
3189 )
3189 )
3190 ts = 0
3190 ts = 0
3191 heads = set()
3191 heads = set()
3192
3192
3193 for rev in range(numrevs):
3193 for rev in range(numrevs):
3194 dbase = r.deltaparent(rev)
3194 dbase = r.deltaparent(rev)
3195 if dbase == -1:
3195 if dbase == -1:
3196 dbase = rev
3196 dbase = rev
3197 cbase = r.chainbase(rev)
3197 cbase = r.chainbase(rev)
3198 clen = r.chainlen(rev)
3198 clen = r.chainlen(rev)
3199 p1, p2 = r.parentrevs(rev)
3199 p1, p2 = r.parentrevs(rev)
3200 rs = r.rawsize(rev)
3200 rs = r.rawsize(rev)
3201 ts = ts + rs
3201 ts = ts + rs
3202 heads -= set(r.parentrevs(rev))
3202 heads -= set(r.parentrevs(rev))
3203 heads.add(rev)
3203 heads.add(rev)
3204 try:
3204 try:
3205 compression = ts / r.end(rev)
3205 compression = ts / r.end(rev)
3206 except ZeroDivisionError:
3206 except ZeroDivisionError:
3207 compression = 0
3207 compression = 0
3208 ui.write(
3208 ui.write(
3209 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3209 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3210 b"%11d %5d %8d\n"
3210 b"%11d %5d %8d\n"
3211 % (
3211 % (
3212 rev,
3212 rev,
3213 p1,
3213 p1,
3214 p2,
3214 p2,
3215 r.start(rev),
3215 r.start(rev),
3216 r.end(rev),
3216 r.end(rev),
3217 r.start(dbase),
3217 r.start(dbase),
3218 r.start(cbase),
3218 r.start(cbase),
3219 r.start(p1),
3219 r.start(p1),
3220 r.start(p2),
3220 r.start(p2),
3221 rs,
3221 rs,
3222 ts,
3222 ts,
3223 compression,
3223 compression,
3224 len(heads),
3224 len(heads),
3225 clen,
3225 clen,
3226 )
3226 )
3227 )
3227 )
3228 return 0
3228 return 0
3229
3229
3230 format = r._format_version
3230 format = r._format_version
3231 v = r._format_flags
3231 v = r._format_flags
3232 flags = []
3232 flags = []
3233 gdelta = False
3233 gdelta = False
3234 if v & revlog.FLAG_INLINE_DATA:
3234 if v & revlog.FLAG_INLINE_DATA:
3235 flags.append(b'inline')
3235 flags.append(b'inline')
3236 if v & revlog.FLAG_GENERALDELTA:
3236 if v & revlog.FLAG_GENERALDELTA:
3237 gdelta = True
3237 gdelta = True
3238 flags.append(b'generaldelta')
3238 flags.append(b'generaldelta')
3239 if not flags:
3239 if not flags:
3240 flags = [b'(none)']
3240 flags = [b'(none)']
3241
3241
3242 ### tracks merge vs single parent
3242 ### tracks merge vs single parent
3243 nummerges = 0
3243 nummerges = 0
3244
3244
3245 ### tracks ways the "delta" are build
3245 ### tracks ways the "delta" are build
3246 # nodelta
3246 # nodelta
3247 numempty = 0
3247 numempty = 0
3248 numemptytext = 0
3248 numemptytext = 0
3249 numemptydelta = 0
3249 numemptydelta = 0
3250 # full file content
3250 # full file content
3251 numfull = 0
3251 numfull = 0
3252 # intermediate snapshot against a prior snapshot
3252 # intermediate snapshot against a prior snapshot
3253 numsemi = 0
3253 numsemi = 0
3254 # snapshot count per depth
3254 # snapshot count per depth
3255 numsnapdepth = collections.defaultdict(lambda: 0)
3255 numsnapdepth = collections.defaultdict(lambda: 0)
3256 # delta against previous revision
3256 # delta against previous revision
3257 numprev = 0
3257 numprev = 0
3258 # delta against first or second parent (not prev)
3258 # delta against first or second parent (not prev)
3259 nump1 = 0
3259 nump1 = 0
3260 nump2 = 0
3260 nump2 = 0
3261 # delta against neither prev nor parents
3261 # delta against neither prev nor parents
3262 numother = 0
3262 numother = 0
3263 # delta against prev that are also first or second parent
3263 # delta against prev that are also first or second parent
3264 # (details of `numprev`)
3264 # (details of `numprev`)
3265 nump1prev = 0
3265 nump1prev = 0
3266 nump2prev = 0
3266 nump2prev = 0
3267
3267
3268 # data about delta chain of each revs
3268 # data about delta chain of each revs
3269 chainlengths = []
3269 chainlengths = []
3270 chainbases = []
3270 chainbases = []
3271 chainspans = []
3271 chainspans = []
3272
3272
3273 # data about each revision
3273 # data about each revision
3274 datasize = [None, 0, 0]
3274 datasize = [None, 0, 0]
3275 fullsize = [None, 0, 0]
3275 fullsize = [None, 0, 0]
3276 semisize = [None, 0, 0]
3276 semisize = [None, 0, 0]
3277 # snapshot count per depth
3277 # snapshot count per depth
3278 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3278 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3279 deltasize = [None, 0, 0]
3279 deltasize = [None, 0, 0]
3280 chunktypecounts = {}
3280 chunktypecounts = {}
3281 chunktypesizes = {}
3281 chunktypesizes = {}
3282
3282
3283 def addsize(size, l):
3283 def addsize(size, l):
3284 if l[0] is None or size < l[0]:
3284 if l[0] is None or size < l[0]:
3285 l[0] = size
3285 l[0] = size
3286 if size > l[1]:
3286 if size > l[1]:
3287 l[1] = size
3287 l[1] = size
3288 l[2] += size
3288 l[2] += size
3289
3289
3290 numrevs = len(r)
3290 numrevs = len(r)
3291 for rev in range(numrevs):
3291 for rev in range(numrevs):
3292 p1, p2 = r.parentrevs(rev)
3292 p1, p2 = r.parentrevs(rev)
3293 delta = r.deltaparent(rev)
3293 delta = r.deltaparent(rev)
3294 if format > 0:
3294 if format > 0:
3295 addsize(r.rawsize(rev), datasize)
3295 addsize(r.rawsize(rev), datasize)
3296 if p2 != nullrev:
3296 if p2 != nullrev:
3297 nummerges += 1
3297 nummerges += 1
3298 size = r.length(rev)
3298 size = r.length(rev)
3299 if delta == nullrev:
3299 if delta == nullrev:
3300 chainlengths.append(0)
3300 chainlengths.append(0)
3301 chainbases.append(r.start(rev))
3301 chainbases.append(r.start(rev))
3302 chainspans.append(size)
3302 chainspans.append(size)
3303 if size == 0:
3303 if size == 0:
3304 numempty += 1
3304 numempty += 1
3305 numemptytext += 1
3305 numemptytext += 1
3306 else:
3306 else:
3307 numfull += 1
3307 numfull += 1
3308 numsnapdepth[0] += 1
3308 numsnapdepth[0] += 1
3309 addsize(size, fullsize)
3309 addsize(size, fullsize)
3310 addsize(size, snapsizedepth[0])
3310 addsize(size, snapsizedepth[0])
3311 else:
3311 else:
3312 chainlengths.append(chainlengths[delta] + 1)
3312 chainlengths.append(chainlengths[delta] + 1)
3313 baseaddr = chainbases[delta]
3313 baseaddr = chainbases[delta]
3314 revaddr = r.start(rev)
3314 revaddr = r.start(rev)
3315 chainbases.append(baseaddr)
3315 chainbases.append(baseaddr)
3316 chainspans.append((revaddr - baseaddr) + size)
3316 chainspans.append((revaddr - baseaddr) + size)
3317 if size == 0:
3317 if size == 0:
3318 numempty += 1
3318 numempty += 1
3319 numemptydelta += 1
3319 numemptydelta += 1
3320 elif r.issnapshot(rev):
3320 elif r.issnapshot(rev):
3321 addsize(size, semisize)
3321 addsize(size, semisize)
3322 numsemi += 1
3322 numsemi += 1
3323 depth = r.snapshotdepth(rev)
3323 depth = r.snapshotdepth(rev)
3324 numsnapdepth[depth] += 1
3324 numsnapdepth[depth] += 1
3325 addsize(size, snapsizedepth[depth])
3325 addsize(size, snapsizedepth[depth])
3326 else:
3326 else:
3327 addsize(size, deltasize)
3327 addsize(size, deltasize)
3328 if delta == rev - 1:
3328 if delta == rev - 1:
3329 numprev += 1
3329 numprev += 1
3330 if delta == p1:
3330 if delta == p1:
3331 nump1prev += 1
3331 nump1prev += 1
3332 elif delta == p2:
3332 elif delta == p2:
3333 nump2prev += 1
3333 nump2prev += 1
3334 elif delta == p1:
3334 elif delta == p1:
3335 nump1 += 1
3335 nump1 += 1
3336 elif delta == p2:
3336 elif delta == p2:
3337 nump2 += 1
3337 nump2 += 1
3338 elif delta != nullrev:
3338 elif delta != nullrev:
3339 numother += 1
3339 numother += 1
3340
3340
3341 # Obtain data on the raw chunks in the revlog.
3341 # Obtain data on the raw chunks in the revlog.
3342 if util.safehasattr(r, b'_getsegmentforrevs'):
3342 if util.safehasattr(r, b'_getsegmentforrevs'):
3343 segment = r._getsegmentforrevs(rev, rev)[1]
3343 segment = r._getsegmentforrevs(rev, rev)[1]
3344 else:
3344 else:
3345 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3345 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3346 if segment:
3346 if segment:
3347 chunktype = bytes(segment[0:1])
3347 chunktype = bytes(segment[0:1])
3348 else:
3348 else:
3349 chunktype = b'empty'
3349 chunktype = b'empty'
3350
3350
3351 if chunktype not in chunktypecounts:
3351 if chunktype not in chunktypecounts:
3352 chunktypecounts[chunktype] = 0
3352 chunktypecounts[chunktype] = 0
3353 chunktypesizes[chunktype] = 0
3353 chunktypesizes[chunktype] = 0
3354
3354
3355 chunktypecounts[chunktype] += 1
3355 chunktypecounts[chunktype] += 1
3356 chunktypesizes[chunktype] += size
3356 chunktypesizes[chunktype] += size
3357
3357
3358 # Adjust size min value for empty cases
3358 # Adjust size min value for empty cases
3359 for size in (datasize, fullsize, semisize, deltasize):
3359 for size in (datasize, fullsize, semisize, deltasize):
3360 if size[0] is None:
3360 if size[0] is None:
3361 size[0] = 0
3361 size[0] = 0
3362
3362
3363 numdeltas = numrevs - numfull - numempty - numsemi
3363 numdeltas = numrevs - numfull - numempty - numsemi
3364 numoprev = numprev - nump1prev - nump2prev
3364 numoprev = numprev - nump1prev - nump2prev
3365 totalrawsize = datasize[2]
3365 totalrawsize = datasize[2]
3366 datasize[2] /= numrevs
3366 datasize[2] /= numrevs
3367 fulltotal = fullsize[2]
3367 fulltotal = fullsize[2]
3368 if numfull == 0:
3368 if numfull == 0:
3369 fullsize[2] = 0
3369 fullsize[2] = 0
3370 else:
3370 else:
3371 fullsize[2] /= numfull
3371 fullsize[2] /= numfull
3372 semitotal = semisize[2]
3372 semitotal = semisize[2]
3373 snaptotal = {}
3373 snaptotal = {}
3374 if numsemi > 0:
3374 if numsemi > 0:
3375 semisize[2] /= numsemi
3375 semisize[2] /= numsemi
3376 for depth in snapsizedepth:
3376 for depth in snapsizedepth:
3377 snaptotal[depth] = snapsizedepth[depth][2]
3377 snaptotal[depth] = snapsizedepth[depth][2]
3378 snapsizedepth[depth][2] /= numsnapdepth[depth]
3378 snapsizedepth[depth][2] /= numsnapdepth[depth]
3379
3379
3380 deltatotal = deltasize[2]
3380 deltatotal = deltasize[2]
3381 if numdeltas > 0:
3381 if numdeltas > 0:
3382 deltasize[2] /= numdeltas
3382 deltasize[2] /= numdeltas
3383 totalsize = fulltotal + semitotal + deltatotal
3383 totalsize = fulltotal + semitotal + deltatotal
3384 avgchainlen = sum(chainlengths) / numrevs
3384 avgchainlen = sum(chainlengths) / numrevs
3385 maxchainlen = max(chainlengths)
3385 maxchainlen = max(chainlengths)
3386 maxchainspan = max(chainspans)
3386 maxchainspan = max(chainspans)
3387 compratio = 1
3387 compratio = 1
3388 if totalsize:
3388 if totalsize:
3389 compratio = totalrawsize / totalsize
3389 compratio = totalrawsize / totalsize
3390
3390
3391 basedfmtstr = b'%%%dd\n'
3391 basedfmtstr = b'%%%dd\n'
3392 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3392 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3393
3393
3394 def dfmtstr(max):
3394 def dfmtstr(max):
3395 return basedfmtstr % len(str(max))
3395 return basedfmtstr % len(str(max))
3396
3396
3397 def pcfmtstr(max, padding=0):
3397 def pcfmtstr(max, padding=0):
3398 return basepcfmtstr % (len(str(max)), b' ' * padding)
3398 return basepcfmtstr % (len(str(max)), b' ' * padding)
3399
3399
3400 def pcfmt(value, total):
3400 def pcfmt(value, total):
3401 if total:
3401 if total:
3402 return (value, 100 * float(value) / total)
3402 return (value, 100 * float(value) / total)
3403 else:
3403 else:
3404 return value, 100.0
3404 return value, 100.0
3405
3405
3406 ui.writenoi18n(b'format : %d\n' % format)
3406 ui.writenoi18n(b'format : %d\n' % format)
3407 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3407 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3408
3408
3409 ui.write(b'\n')
3409 ui.write(b'\n')
3410 fmt = pcfmtstr(totalsize)
3410 fmt = pcfmtstr(totalsize)
3411 fmt2 = dfmtstr(totalsize)
3411 fmt2 = dfmtstr(totalsize)
3412 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3412 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3413 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3413 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3414 ui.writenoi18n(
3414 ui.writenoi18n(
3415 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3415 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3416 )
3416 )
3417 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3417 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3418 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3418 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3419 ui.writenoi18n(
3419 ui.writenoi18n(
3420 b' text : '
3420 b' text : '
3421 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3421 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3422 )
3422 )
3423 ui.writenoi18n(
3423 ui.writenoi18n(
3424 b' delta : '
3424 b' delta : '
3425 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3425 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3426 )
3426 )
3427 ui.writenoi18n(
3427 ui.writenoi18n(
3428 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3428 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3429 )
3429 )
3430 for depth in sorted(numsnapdepth):
3430 for depth in sorted(numsnapdepth):
3431 ui.write(
3431 ui.write(
3432 (b' lvl-%-3d : ' % depth)
3432 (b' lvl-%-3d : ' % depth)
3433 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3433 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3434 )
3434 )
3435 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3435 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3436 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3436 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3437 ui.writenoi18n(
3437 ui.writenoi18n(
3438 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3438 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3439 )
3439 )
3440 for depth in sorted(numsnapdepth):
3440 for depth in sorted(numsnapdepth):
3441 ui.write(
3441 ui.write(
3442 (b' lvl-%-3d : ' % depth)
3442 (b' lvl-%-3d : ' % depth)
3443 + fmt % pcfmt(snaptotal[depth], totalsize)
3443 + fmt % pcfmt(snaptotal[depth], totalsize)
3444 )
3444 )
3445 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3445 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3446
3446
3447 def fmtchunktype(chunktype):
3447 def fmtchunktype(chunktype):
3448 if chunktype == b'empty':
3448 if chunktype == b'empty':
3449 return b' %s : ' % chunktype
3449 return b' %s : ' % chunktype
3450 elif chunktype in pycompat.bytestr(string.ascii_letters):
3450 elif chunktype in pycompat.bytestr(string.ascii_letters):
3451 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3451 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3452 else:
3452 else:
3453 return b' 0x%s : ' % hex(chunktype)
3453 return b' 0x%s : ' % hex(chunktype)
3454
3454
3455 ui.write(b'\n')
3455 ui.write(b'\n')
3456 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3456 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3457 for chunktype in sorted(chunktypecounts):
3457 for chunktype in sorted(chunktypecounts):
3458 ui.write(fmtchunktype(chunktype))
3458 ui.write(fmtchunktype(chunktype))
3459 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3459 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3460 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3460 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3461 for chunktype in sorted(chunktypecounts):
3461 for chunktype in sorted(chunktypecounts):
3462 ui.write(fmtchunktype(chunktype))
3462 ui.write(fmtchunktype(chunktype))
3463 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3463 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3464
3464
3465 ui.write(b'\n')
3465 ui.write(b'\n')
3466 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3466 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3467 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3467 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3468 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3468 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3469 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3469 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3470 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3470 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3471
3471
3472 if format > 0:
3472 if format > 0:
3473 ui.write(b'\n')
3473 ui.write(b'\n')
3474 ui.writenoi18n(
3474 ui.writenoi18n(
3475 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3475 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3476 % tuple(datasize)
3476 % tuple(datasize)
3477 )
3477 )
3478 ui.writenoi18n(
3478 ui.writenoi18n(
3479 b'full revision size (min/max/avg) : %d / %d / %d\n'
3479 b'full revision size (min/max/avg) : %d / %d / %d\n'
3480 % tuple(fullsize)
3480 % tuple(fullsize)
3481 )
3481 )
3482 ui.writenoi18n(
3482 ui.writenoi18n(
3483 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3483 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3484 % tuple(semisize)
3484 % tuple(semisize)
3485 )
3485 )
3486 for depth in sorted(snapsizedepth):
3486 for depth in sorted(snapsizedepth):
3487 if depth == 0:
3487 if depth == 0:
3488 continue
3488 continue
3489 ui.writenoi18n(
3489 ui.writenoi18n(
3490 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3490 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3491 % ((depth,) + tuple(snapsizedepth[depth]))
3491 % ((depth,) + tuple(snapsizedepth[depth]))
3492 )
3492 )
3493 ui.writenoi18n(
3493 ui.writenoi18n(
3494 b'delta size (min/max/avg) : %d / %d / %d\n'
3494 b'delta size (min/max/avg) : %d / %d / %d\n'
3495 % tuple(deltasize)
3495 % tuple(deltasize)
3496 )
3496 )
3497
3497
3498 if numdeltas > 0:
3498 if numdeltas > 0:
3499 ui.write(b'\n')
3499 ui.write(b'\n')
3500 fmt = pcfmtstr(numdeltas)
3500 fmt = pcfmtstr(numdeltas)
3501 fmt2 = pcfmtstr(numdeltas, 4)
3501 fmt2 = pcfmtstr(numdeltas, 4)
3502 ui.writenoi18n(
3502 ui.writenoi18n(
3503 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3503 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3504 )
3504 )
3505 if numprev > 0:
3505 if numprev > 0:
3506 ui.writenoi18n(
3506 ui.writenoi18n(
3507 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3507 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3508 )
3508 )
3509 ui.writenoi18n(
3509 ui.writenoi18n(
3510 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3510 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3511 )
3511 )
3512 ui.writenoi18n(
3512 ui.writenoi18n(
3513 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3513 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3514 )
3514 )
3515 if gdelta:
3515 if gdelta:
3516 ui.writenoi18n(
3516 ui.writenoi18n(
3517 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3517 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3518 )
3518 )
3519 ui.writenoi18n(
3519 ui.writenoi18n(
3520 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3520 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3521 )
3521 )
3522 ui.writenoi18n(
3522 ui.writenoi18n(
3523 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3523 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3524 )
3524 )
3525
3525
3526
3526
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only two index dump layouts are supported: 0 (legacy) and 1 (flagged).
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full-length hashes with --debug, abbreviated hashes otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one rendered node id to size the header columns.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output row per revision in the revlog.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # If the parents cannot be resolved, fall back to null
                # parents so the dump keeps going.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3640
3640
3641
3641
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parse tree is pushed through these (name, transform) stages in
    # order; each stage's output feeds the next.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages whose tree is always printed.
    # showchanged: stages printed only when their tree differs from the
    # last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        # Keep every intermediate tree so --verify-optimized can compare
        # the analyzed and optimized stages below.
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and compare
        # the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a diff-style report of where the two sequences disagree.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3773
3773
3774
3774
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented for now.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    log_fh = None
    fd_spec = opts[b'logiofd']
    if fd_spec:
        # Line buffering would be preferable, but binary-mode line
        # buffering is unsupported (and warns on Python 3.8+), so run
        # unbuffered; this is not performance-critical code.
        fd = int(fd_spec)
        try:
            log_fh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            log_fh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        log_fh = open(opts[b'logiofile'], b'ab', 0)

    wireprotoserver.sshserver(ui, repo, logfh=log_fh).serve_forever()
3823
3823
3824
3824
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use this if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both arguments to nodes; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Hold the working-copy lock while rewriting the dirstate parents.
    with repo.wlock():
        repo.setparents(node1, node2)
3852
3852
3853
3853
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision, not a
    # file path, so shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUG FIX: errors used to be reported under b'debugdata' (a
            # copy-paste from the debugdata command); report this command.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through any wrapper (e.g. filelog) to the underlying revlog,
    # which is the object that actually carries sidedata.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for a stable, reproducible output order.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3880
3880
3881
3881
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # Chain building relies on the Windows crypto APIs (win32 module,
    # imported below), so bail out early on every other platform.
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # No SOURCE argument: fall back to the repo's 'default' path,
        # which requires a repository to exist (optionalrepo above).
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Map supported schemes to their default ports; anything else aborts.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Verification is intentionally disabled (CERT_NONE): we only want the
    # peer's raw certificate bytes, not a validated connection.
    # NOTE(review): ssl.wrap_socket() was deprecated in Python 3.7 and
    # removed in 3.12 -- confirm which Python versions this path must
    # still support.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # binary_form=True: DER-encoded certificate for the win32 helper.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass checks only; build=False avoids touching Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass lets Windows fetch the missing intermediates.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        # Always release the socket, even when connect/verification fails.
        s.close()
3953
3953
3954
3954
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle in the repo, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Pre-seed options that the shared incoming/bundle machinery below
    # expects but this command does not expose to the user.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log options (--newest-first, --no-merges).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we no longer have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly compute which changesets this bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: apply the first bundle that contains the
                # requested changeset, inside a transaction.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy (bundle1) format.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Found and restored the changeset; stop scanning.
                        break
            else:
                # Listing mode: header with the backup's mtime, then the
                # changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Compact one-line-per-changeset template.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Always clean up temporary bundle-repo state.
            cleanupfn()
4095
4095
4096
4096
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the substate of the given (or working) revision: one record per
    # subrepository path, with its source and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    write = ui.writenoi18n
    for path, state in sorted(ctx.substate.items()):
        write(b'path %s\n' % path)
        write(b' source %s\n' % state[0])
        write(b' revision %s\n' % state[1])
4108
4108
4109
4109
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Import lazily: the interpreter machinery is only needed here.
    import code

    # `repo` is None when run outside a repository (optionalrepo above).
    local_ns = {'ui': ui, 'repo': repo}
    code.interact(local=local_ns)
4125
4125
4126
4126
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors set is requested.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: changeset context -> bytes, node -> short hash.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # Leading separator, then the set's members space-separated
                # on a single line.
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            # An empty set (pruned changeset) still produces a newline.
            ui.write(b'\n')
4181
4181
4182
4182
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what the cache already holds.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            display = b'missing'
        elif not fnode:
            # Falsy but not None: the cached entry is bogus.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                # Cache points at a filenode absent from the .hgtags filelog.
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4201
4201
4202
4202
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs a repository, which optionalrepo does not guarantee.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                # empty key, or 'ui' which is a reserved resource name
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree, and the alias-expanded tree when
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the -D properties only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4266
4266
4267
4267
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() can return None; substitute a visible placeholder.
    shown = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % shown)
4282
4282
4283
4283
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returned, for testing purposes.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4296
4296
4297
4297
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock while rewriting caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4303
4303
4304
4304
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate requested optimizations, then delegate all real work to
    # the upgrade module.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
4354
4354
4355
4355
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # On platforms whose path separator is not '/', optionally display
    # normalized (forward-slash) relative paths per the ui.slash setting.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn
    # Column widths: widest repo-absolute path and widest cwd-relative path.
    abswidth = max(len(p) for p in paths)
    relwidth = max(len(repo.pathto(p)) for p in paths)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in paths:
        marker = p in paths and matcher.exact(p) and b'exact' or b''
        line = fmt % (p, display(repo.pathto(p)), marker)
        ui.write(b"%s\n" % line.rstrip())
4382
4382
4383
4383
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render the divergent changesets (if any) as "hash (phase)" pairs,
        # joined by spaces and followed by a trailing separator space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4401
4401
4402
4402
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; whatever remains (and is truthy)
        # is forwarded as wire protocol arguments.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # Issue the command twice to check that the first call does not
        # corrupt the stream for the next command.
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4433
4433
4434
4434
4435 def _parsewirelangblocks(fh):
4435 def _parsewirelangblocks(fh):
4436 activeaction = None
4436 activeaction = None
4437 blocklines = []
4437 blocklines = []
4438 lastindent = 0
4438 lastindent = 0
4439
4439
4440 for line in fh:
4440 for line in fh:
4441 line = line.rstrip()
4441 line = line.rstrip()
4442 if not line:
4442 if not line:
4443 continue
4443 continue
4444
4444
4445 if line.startswith(b'#'):
4445 if line.startswith(b'#'):
4446 continue
4446 continue
4447
4447
4448 if not line.startswith(b' '):
4448 if not line.startswith(b' '):
4449 # New block. Flush previous one.
4449 # New block. Flush previous one.
4450 if activeaction:
4450 if activeaction:
4451 yield activeaction, blocklines
4451 yield activeaction, blocklines
4452
4452
4453 activeaction = line
4453 activeaction = line
4454 blocklines = []
4454 blocklines = []
4455 lastindent = 0
4455 lastindent = 0
4456 continue
4456 continue
4457
4457
4458 # Else we start with an indent.
4458 # Else we start with an indent.
4459
4459
4460 if not activeaction:
4460 if not activeaction:
4461 raise error.Abort(_(b'indented line outside of block'))
4461 raise error.Abort(_(b'indented line outside of block'))
4462
4462
4463 indent = len(line) - len(line.lstrip())
4463 indent = len(line) - len(line.lstrip())
4464
4464
4465 # If this line is indented more than the last line, concatenate it.
4465 # If this line is indented more than the last line, concatenate it.
4466 if indent > lastindent and blocklines:
4466 if indent > lastindent and blocklines:
4467 blocklines[-1] += line.lstrip()
4467 blocklines[-1] += line.lstrip()
4468 else:
4468 else:
4469 blocklines.append(line)
4469 blocklines.append(line)
4470 lastindent = indent
4470 lastindent = indent
4471
4471
4472 # Flush last block.
4472 # Flush last block.
4473 if activeaction:
4473 if activeaction:
4474 yield activeaction, blocklines
4474 yield activeaction, blocklines
4475
4475
4476
4476
4477 @command(
4477 @command(
4478 b'debugwireproto',
4478 b'debugwireproto',
4479 [
4479 [
4480 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4480 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4481 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4481 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4482 (
4482 (
4483 b'',
4483 b'',
4484 b'noreadstderr',
4484 b'noreadstderr',
4485 False,
4485 False,
4486 _(b'do not read from stderr of the remote'),
4486 _(b'do not read from stderr of the remote'),
4487 ),
4487 ),
4488 (
4488 (
4489 b'',
4489 b'',
4490 b'nologhandshake',
4490 b'nologhandshake',
4491 False,
4491 False,
4492 _(b'do not log I/O related to the peer handshake'),
4492 _(b'do not log I/O related to the peer handshake'),
4493 ),
4493 ),
4494 ]
4494 ]
4495 + cmdutil.remoteopts,
4495 + cmdutil.remoteopts,
4496 _(b'[PATH]'),
4496 _(b'[PATH]'),
4497 optionalrepo=True,
4497 optionalrepo=True,
4498 )
4498 )
4499 def debugwireproto(ui, repo, path=None, **opts):
4499 def debugwireproto(ui, repo, path=None, **opts):
4500 """send wire protocol commands to a server
4500 """send wire protocol commands to a server
4501
4501
4502 This command can be used to issue wire protocol commands to remote
4502 This command can be used to issue wire protocol commands to remote
4503 peers and to debug the raw data being exchanged.
4503 peers and to debug the raw data being exchanged.
4504
4504
4505 ``--localssh`` will start an SSH server against the current repository
4505 ``--localssh`` will start an SSH server against the current repository
4506 and connect to that. By default, the connection will perform a handshake
4506 and connect to that. By default, the connection will perform a handshake
4507 and establish an appropriate peer instance.
4507 and establish an appropriate peer instance.
4508
4508
4509 ``--peer`` can be used to bypass the handshake protocol and construct a
4509 ``--peer`` can be used to bypass the handshake protocol and construct a
4510 peer instance using the specified class type. Valid values are ``raw``,
4510 peer instance using the specified class type. Valid values are ``raw``,
4511 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4511 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4512 don't support higher-level command actions.
4512 don't support higher-level command actions.
4513
4513
4514 ``--noreadstderr`` can be used to disable automatic reading from stderr
4514 ``--noreadstderr`` can be used to disable automatic reading from stderr
4515 of the peer (for SSH connections only). Disabling automatic reading of
4515 of the peer (for SSH connections only). Disabling automatic reading of
4516 stderr is useful for making output more deterministic.
4516 stderr is useful for making output more deterministic.
4517
4517
4518 Commands are issued via a mini language which is specified via stdin.
4518 Commands are issued via a mini language which is specified via stdin.
4519 The language consists of individual actions to perform. An action is
4519 The language consists of individual actions to perform. An action is
4520 defined by a block. A block is defined as a line with no leading
4520 defined by a block. A block is defined as a line with no leading
4521 space followed by 0 or more lines with leading space. Blocks are
4521 space followed by 0 or more lines with leading space. Blocks are
4522 effectively a high-level command with additional metadata.
4522 effectively a high-level command with additional metadata.
4523
4523
4524 Lines beginning with ``#`` are ignored.
4524 Lines beginning with ``#`` are ignored.
4525
4525
4526 The following sections denote available actions.
4526 The following sections denote available actions.
4527
4527
4528 raw
4528 raw
4529 ---
4529 ---
4530
4530
4531 Send raw data to the server.
4531 Send raw data to the server.
4532
4532
4533 The block payload contains the raw data to send as one atomic send
4533 The block payload contains the raw data to send as one atomic send
4534 operation. The data may not actually be delivered in a single system
4534 operation. The data may not actually be delivered in a single system
4535 call: it depends on the abilities of the transport being used.
4535 call: it depends on the abilities of the transport being used.
4536
4536
4537 Each line in the block is de-indented and concatenated. Then, that
4537 Each line in the block is de-indented and concatenated. Then, that
4538 value is evaluated as a Python b'' literal. This allows the use of
4538 value is evaluated as a Python b'' literal. This allows the use of
4539 backslash escaping, etc.
4539 backslash escaping, etc.
4540
4540
4541 raw+
4541 raw+
4542 ----
4542 ----
4543
4543
4544 Behaves like ``raw`` except flushes output afterwards.
4544 Behaves like ``raw`` except flushes output afterwards.
4545
4545
4546 command <X>
4546 command <X>
4547 -----------
4547 -----------
4548
4548
4549 Send a request to run a named command, whose name follows the ``command``
4549 Send a request to run a named command, whose name follows the ``command``
4550 string.
4550 string.
4551
4551
4552 Arguments to the command are defined as lines in this block. The format of
4552 Arguments to the command are defined as lines in this block. The format of
4553 each line is ``<key> <value>``. e.g.::
4553 each line is ``<key> <value>``. e.g.::
4554
4554
4555 command listkeys
4555 command listkeys
4556 namespace bookmarks
4556 namespace bookmarks
4557
4557
4558 If the value begins with ``eval:``, it will be interpreted as a Python
4558 If the value begins with ``eval:``, it will be interpreted as a Python
4559 literal expression. Otherwise values are interpreted as Python b'' literals.
4559 literal expression. Otherwise values are interpreted as Python b'' literals.
4560 This allows sending complex types and encoding special byte sequences via
4560 This allows sending complex types and encoding special byte sequences via
4561 backslash escaping.
4561 backslash escaping.
4562
4562
4563 The following arguments have special meaning:
4563 The following arguments have special meaning:
4564
4564
4565 ``PUSHFILE``
4565 ``PUSHFILE``
4566 When defined, the *push* mechanism of the peer will be used instead
4566 When defined, the *push* mechanism of the peer will be used instead
4567 of the static request-response mechanism and the content of the
4567 of the static request-response mechanism and the content of the
4568 file specified in the value of this argument will be sent as the
4568 file specified in the value of this argument will be sent as the
4569 command payload.
4569 command payload.
4570
4570
4571 This can be used to submit a local bundle file to the remote.
4571 This can be used to submit a local bundle file to the remote.
4572
4572
4573 batchbegin
4573 batchbegin
4574 ----------
4574 ----------
4575
4575
4576 Instruct the peer to begin a batched send.
4576 Instruct the peer to begin a batched send.
4577
4577
4578 All ``command`` blocks are queued for execution until the next
4578 All ``command`` blocks are queued for execution until the next
4579 ``batchsubmit`` block.
4579 ``batchsubmit`` block.
4580
4580
4581 batchsubmit
4581 batchsubmit
4582 -----------
4582 -----------
4583
4583
4584 Submit previously queued ``command`` blocks as a batch request.
4584 Submit previously queued ``command`` blocks as a batch request.
4585
4585
4586 This action MUST be paired with a ``batchbegin`` action.
4586 This action MUST be paired with a ``batchbegin`` action.
4587
4587
4588 httprequest <method> <path>
4588 httprequest <method> <path>
4589 ---------------------------
4589 ---------------------------
4590
4590
4591 (HTTP peer only)
4591 (HTTP peer only)
4592
4592
4593 Send an HTTP request to the peer.
4593 Send an HTTP request to the peer.
4594
4594
4595 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4595 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4596
4596
4597 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4597 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4598 headers to add to the request. e.g. ``Accept: foo``.
4598 headers to add to the request. e.g. ``Accept: foo``.
4599
4599
4600 The following arguments are special:
4600 The following arguments are special:
4601
4601
4602 ``BODYFILE``
4602 ``BODYFILE``
4603 The content of the file defined as the value to this argument will be
4603 The content of the file defined as the value to this argument will be
4604 transferred verbatim as the HTTP request body.
4604 transferred verbatim as the HTTP request body.
4605
4605
4606 ``frame <type> <flags> <payload>``
4606 ``frame <type> <flags> <payload>``
4607 Send a unified protocol frame as part of the request body.
4607 Send a unified protocol frame as part of the request body.
4608
4608
4609 All frames will be collected and sent as the body to the HTTP
4609 All frames will be collected and sent as the body to the HTTP
4610 request.
4610 request.
4611
4611
4612 close
4612 close
4613 -----
4613 -----
4614
4614
4615 Close the connection to the server.
4615 Close the connection to the server.
4616
4616
4617 flush
4617 flush
4618 -----
4618 -----
4619
4619
4620 Flush data written to the server.
4620 Flush data written to the server.
4621
4621
4622 readavailable
4622 readavailable
4623 -------------
4623 -------------
4624
4624
4625 Close the write end of the connection and read all available data from
4625 Close the write end of the connection and read all available data from
4626 the server.
4626 the server.
4627
4627
4628 If the connection to the server encompasses multiple pipes, we poll both
4628 If the connection to the server encompasses multiple pipes, we poll both
4629 pipes and read available data.
4629 pipes and read available data.
4630
4630
4631 readline
4631 readline
4632 --------
4632 --------
4633
4633
4634 Read a line of output from the server. If there are multiple output
4634 Read a line of output from the server. If there are multiple output
4635 pipes, reads only the main pipe.
4635 pipes, reads only the main pipe.
4636
4636
4637 ereadline
4637 ereadline
4638 ---------
4638 ---------
4639
4639
4640 Like ``readline``, but read from the stderr pipe, if available.
4640 Like ``readline``, but read from the stderr pipe, if available.
4641
4641
4642 read <X>
4642 read <X>
4643 --------
4643 --------
4644
4644
4645 ``read()`` N bytes from the server's main output pipe.
4645 ``read()`` N bytes from the server's main output pipe.
4646
4646
4647 eread <X>
4647 eread <X>
4648 ---------
4648 ---------
4649
4649
4650 ``read()`` N bytes from the server's stderr pipe, if available.
4650 ``read()`` N bytes from the server's stderr pipe, if available.
4651
4651
4652 Specifying Unified Frame-Based Protocol Frames
4652 Specifying Unified Frame-Based Protocol Frames
4653 ----------------------------------------------
4653 ----------------------------------------------
4654
4654
4655 It is possible to emit a *Unified Frame-Based Protocol* by using special
4655 It is possible to emit a *Unified Frame-Based Protocol* by using special
4656 syntax.
4656 syntax.
4657
4657
4658 A frame is composed as a type, flags, and payload. These can be parsed
4658 A frame is composed as a type, flags, and payload. These can be parsed
4659 from a string of the form:
4659 from a string of the form:
4660
4660
4661 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4661 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4662
4662
4663 ``request-id`` and ``stream-id`` are integers defining the request and
4663 ``request-id`` and ``stream-id`` are integers defining the request and
4664 stream identifiers.
4664 stream identifiers.
4665
4665
4666 ``type`` can be an integer value for the frame type or the string name
4666 ``type`` can be an integer value for the frame type or the string name
4667 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4667 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4668 ``command-name``.
4668 ``command-name``.
4669
4669
4670 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4670 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4671 components. Each component (and there can be just one) can be an integer
4671 components. Each component (and there can be just one) can be an integer
4672 or a flag name for stream flags or frame flags, respectively. Values are
4672 or a flag name for stream flags or frame flags, respectively. Values are
4673 resolved to integers and then bitwise OR'd together.
4673 resolved to integers and then bitwise OR'd together.
4674
4674
4675 ``payload`` represents the raw frame payload. If it begins with
4675 ``payload`` represents the raw frame payload. If it begins with
4676 ``cbor:``, the following string is evaluated as Python code and the
4676 ``cbor:``, the following string is evaluated as Python code and the
4677 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4677 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4678 as a Python byte string literal.
4678 as a Python byte string literal.
4679 """
4679 """
4680 opts = pycompat.byteskwargs(opts)
4680 opts = pycompat.byteskwargs(opts)
4681
4681
4682 if opts[b'localssh'] and not repo:
4682 if opts[b'localssh'] and not repo:
4683 raise error.Abort(_(b'--localssh requires a repository'))
4683 raise error.Abort(_(b'--localssh requires a repository'))
4684
4684
4685 if opts[b'peer'] and opts[b'peer'] not in (
4685 if opts[b'peer'] and opts[b'peer'] not in (
4686 b'raw',
4686 b'raw',
4687 b'ssh1',
4687 b'ssh1',
4688 ):
4688 ):
4689 raise error.Abort(
4689 raise error.Abort(
4690 _(b'invalid value for --peer'),
4690 _(b'invalid value for --peer'),
4691 hint=_(b'valid values are "raw" and "ssh1"'),
4691 hint=_(b'valid values are "raw" and "ssh1"'),
4692 )
4692 )
4693
4693
4694 if path and opts[b'localssh']:
4694 if path and opts[b'localssh']:
4695 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4695 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4696
4696
4697 if ui.interactive():
4697 if ui.interactive():
4698 ui.write(_(b'(waiting for commands on stdin)\n'))
4698 ui.write(_(b'(waiting for commands on stdin)\n'))
4699
4699
4700 blocks = list(_parsewirelangblocks(ui.fin))
4700 blocks = list(_parsewirelangblocks(ui.fin))
4701
4701
4702 proc = None
4702 proc = None
4703 stdin = None
4703 stdin = None
4704 stdout = None
4704 stdout = None
4705 stderr = None
4705 stderr = None
4706 opener = None
4706 opener = None
4707
4707
4708 if opts[b'localssh']:
4708 if opts[b'localssh']:
4709 # We start the SSH server in its own process so there is process
4709 # We start the SSH server in its own process so there is process
4710 # separation. This prevents a whole class of potential bugs around
4710 # separation. This prevents a whole class of potential bugs around
4711 # shared state from interfering with server operation.
4711 # shared state from interfering with server operation.
4712 args = procutil.hgcmd() + [
4712 args = procutil.hgcmd() + [
4713 b'-R',
4713 b'-R',
4714 repo.root,
4714 repo.root,
4715 b'debugserve',
4715 b'debugserve',
4716 b'--sshstdio',
4716 b'--sshstdio',
4717 ]
4717 ]
4718 proc = subprocess.Popen(
4718 proc = subprocess.Popen(
4719 pycompat.rapply(procutil.tonativestr, args),
4719 pycompat.rapply(procutil.tonativestr, args),
4720 stdin=subprocess.PIPE,
4720 stdin=subprocess.PIPE,
4721 stdout=subprocess.PIPE,
4721 stdout=subprocess.PIPE,
4722 stderr=subprocess.PIPE,
4722 stderr=subprocess.PIPE,
4723 bufsize=0,
4723 bufsize=0,
4724 )
4724 )
4725
4725
4726 stdin = proc.stdin
4726 stdin = proc.stdin
4727 stdout = proc.stdout
4727 stdout = proc.stdout
4728 stderr = proc.stderr
4728 stderr = proc.stderr
4729
4729
4730 # We turn the pipes into observers so we can log I/O.
4730 # We turn the pipes into observers so we can log I/O.
4731 if ui.verbose or opts[b'peer'] == b'raw':
4731 if ui.verbose or opts[b'peer'] == b'raw':
4732 stdin = util.makeloggingfileobject(
4732 stdin = util.makeloggingfileobject(
4733 ui, proc.stdin, b'i', logdata=True
4733 ui, proc.stdin, b'i', logdata=True
4734 )
4734 )
4735 stdout = util.makeloggingfileobject(
4735 stdout = util.makeloggingfileobject(
4736 ui, proc.stdout, b'o', logdata=True
4736 ui, proc.stdout, b'o', logdata=True
4737 )
4737 )
4738 stderr = util.makeloggingfileobject(
4738 stderr = util.makeloggingfileobject(
4739 ui, proc.stderr, b'e', logdata=True
4739 ui, proc.stderr, b'e', logdata=True
4740 )
4740 )
4741
4741
4742 # --localssh also implies the peer connection settings.
4742 # --localssh also implies the peer connection settings.
4743
4743
4744 url = b'ssh://localserver'
4744 url = b'ssh://localserver'
4745 autoreadstderr = not opts[b'noreadstderr']
4745 autoreadstderr = not opts[b'noreadstderr']
4746
4746
4747 if opts[b'peer'] == b'ssh1':
4747 if opts[b'peer'] == b'ssh1':
4748 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4748 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4749 peer = sshpeer.sshv1peer(
4749 peer = sshpeer.sshv1peer(
4750 ui,
4750 ui,
4751 url,
4751 url,
4752 proc,
4752 proc,
4753 stdin,
4753 stdin,
4754 stdout,
4754 stdout,
4755 stderr,
4755 stderr,
4756 None,
4756 None,
4757 autoreadstderr=autoreadstderr,
4757 autoreadstderr=autoreadstderr,
4758 )
4758 )
4759 elif opts[b'peer'] == b'raw':
4759 elif opts[b'peer'] == b'raw':
4760 ui.write(_(b'using raw connection to peer\n'))
4760 ui.write(_(b'using raw connection to peer\n'))
4761 peer = None
4761 peer = None
4762 else:
4762 else:
4763 ui.write(_(b'creating ssh peer from handshake results\n'))
4763 ui.write(_(b'creating ssh peer from handshake results\n'))
4764 peer = sshpeer.makepeer(
4764 peer = sshpeer.makepeer(
4765 ui,
4765 ui,
4766 url,
4766 url,
4767 proc,
4767 proc,
4768 stdin,
4768 stdin,
4769 stdout,
4769 stdout,
4770 stderr,
4770 stderr,
4771 autoreadstderr=autoreadstderr,
4771 autoreadstderr=autoreadstderr,
4772 )
4772 )
4773
4773
4774 elif path:
4774 elif path:
4775 # We bypass hg.peer() so we can proxy the sockets.
4775 # We bypass hg.peer() so we can proxy the sockets.
4776 # TODO consider not doing this because we skip
4776 # TODO consider not doing this because we skip
4777 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4777 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4778 u = urlutil.url(path)
4778 u = urlutil.url(path)
4779 if u.scheme != b'http':
4779 if u.scheme != b'http':
4780 raise error.Abort(_(b'only http:// paths are currently supported'))
4780 raise error.Abort(_(b'only http:// paths are currently supported'))
4781
4781
4782 url, authinfo = u.authinfo()
4782 url, authinfo = u.authinfo()
4783 openerargs = {
4783 openerargs = {
4784 'useragent': b'Mercurial debugwireproto',
4784 'useragent': b'Mercurial debugwireproto',
4785 }
4785 }
4786
4786
4787 # Turn pipes/sockets into observers so we can log I/O.
4787 # Turn pipes/sockets into observers so we can log I/O.
4788 if ui.verbose:
4788 if ui.verbose:
4789 openerargs.update(
4789 openerargs.update(
4790 {
4790 {
4791 'loggingfh': ui,
4791 'loggingfh': ui,
4792 'loggingname': b's',
4792 'loggingname': b's',
4793 'loggingopts': {
4793 'loggingopts': {
4794 'logdata': True,
4794 'logdata': True,
4795 'logdataapis': False,
4795 'logdataapis': False,
4796 },
4796 },
4797 }
4797 }
4798 )
4798 )
4799
4799
4800 if ui.debugflag:
4800 if ui.debugflag:
4801 openerargs['loggingopts']['logdataapis'] = True
4801 openerargs['loggingopts']['logdataapis'] = True
4802
4802
4803 # Don't send default headers when in raw mode. This allows us to
4803 # Don't send default headers when in raw mode. This allows us to
4804 # bypass most of the behavior of our URL handling code so we can
4804 # bypass most of the behavior of our URL handling code so we can
4805 # have near complete control over what's sent on the wire.
4805 # have near complete control over what's sent on the wire.
4806 if opts[b'peer'] == b'raw':
4806 if opts[b'peer'] == b'raw':
4807 openerargs['sendaccept'] = False
4807 openerargs['sendaccept'] = False
4808
4808
4809 opener = urlmod.opener(ui, authinfo, **openerargs)
4809 opener = urlmod.opener(ui, authinfo, **openerargs)
4810
4810
4811 if opts[b'peer'] == b'raw':
4811 if opts[b'peer'] == b'raw':
4812 ui.write(_(b'using raw connection to peer\n'))
4812 ui.write(_(b'using raw connection to peer\n'))
4813 peer = None
4813 peer = None
4814 elif opts[b'peer']:
4814 elif opts[b'peer']:
4815 raise error.Abort(
4815 raise error.Abort(
4816 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4816 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4817 )
4817 )
4818 else:
4818 else:
4819 peer = httppeer.makepeer(ui, path, opener=opener)
4819 peer = httppeer.makepeer(ui, path, opener=opener)
4820
4820
4821 # We /could/ populate stdin/stdout with sock.makefile()...
4821 # We /could/ populate stdin/stdout with sock.makefile()...
4822 else:
4822 else:
4823 raise error.Abort(_(b'unsupported connection configuration'))
4823 raise error.Abort(_(b'unsupported connection configuration'))
4824
4824
4825 batchedcommands = None
4825 batchedcommands = None
4826
4826
4827 # Now perform actions based on the parsed wire language instructions.
4827 # Now perform actions based on the parsed wire language instructions.
4828 for action, lines in blocks:
4828 for action, lines in blocks:
4829 if action in (b'raw', b'raw+'):
4829 if action in (b'raw', b'raw+'):
4830 if not stdin:
4830 if not stdin:
4831 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4831 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4832
4832
4833 # Concatenate the data together.
4833 # Concatenate the data together.
4834 data = b''.join(l.lstrip() for l in lines)
4834 data = b''.join(l.lstrip() for l in lines)
4835 data = stringutil.unescapestr(data)
4835 data = stringutil.unescapestr(data)
4836 stdin.write(data)
4836 stdin.write(data)
4837
4837
4838 if action == b'raw+':
4838 if action == b'raw+':
4839 stdin.flush()
4839 stdin.flush()
4840 elif action == b'flush':
4840 elif action == b'flush':
4841 if not stdin:
4841 if not stdin:
4842 raise error.Abort(_(b'cannot call flush on this peer'))
4842 raise error.Abort(_(b'cannot call flush on this peer'))
4843 stdin.flush()
4843 stdin.flush()
4844 elif action.startswith(b'command'):
4844 elif action.startswith(b'command'):
4845 if not peer:
4845 if not peer:
4846 raise error.Abort(
4846 raise error.Abort(
4847 _(
4847 _(
4848 b'cannot send commands unless peer instance '
4848 b'cannot send commands unless peer instance '
4849 b'is available'
4849 b'is available'
4850 )
4850 )
4851 )
4851 )
4852
4852
4853 command = action.split(b' ', 1)[1]
4853 command = action.split(b' ', 1)[1]
4854
4854
4855 args = {}
4855 args = {}
4856 for line in lines:
4856 for line in lines:
4857 # We need to allow empty values.
4857 # We need to allow empty values.
4858 fields = line.lstrip().split(b' ', 1)
4858 fields = line.lstrip().split(b' ', 1)
4859 if len(fields) == 1:
4859 if len(fields) == 1:
4860 key = fields[0]
4860 key = fields[0]
4861 value = b''
4861 value = b''
4862 else:
4862 else:
4863 key, value = fields
4863 key, value = fields
4864
4864
4865 if value.startswith(b'eval:'):
4865 if value.startswith(b'eval:'):
4866 value = stringutil.evalpythonliteral(value[5:])
4866 value = stringutil.evalpythonliteral(value[5:])
4867 else:
4867 else:
4868 value = stringutil.unescapestr(value)
4868 value = stringutil.unescapestr(value)
4869
4869
4870 args[key] = value
4870 args[key] = value
4871
4871
4872 if batchedcommands is not None:
4872 if batchedcommands is not None:
4873 batchedcommands.append((command, args))
4873 batchedcommands.append((command, args))
4874 continue
4874 continue
4875
4875
4876 ui.status(_(b'sending %s command\n') % command)
4876 ui.status(_(b'sending %s command\n') % command)
4877
4877
4878 if b'PUSHFILE' in args:
4878 if b'PUSHFILE' in args:
4879 with open(args[b'PUSHFILE'], 'rb') as fh:
4879 with open(args[b'PUSHFILE'], 'rb') as fh:
4880 del args[b'PUSHFILE']
4880 del args[b'PUSHFILE']
4881 res, output = peer._callpush(
4881 res, output = peer._callpush(
4882 command, fh, **pycompat.strkwargs(args)
4882 command, fh, **pycompat.strkwargs(args)
4883 )
4883 )
4884 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4884 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4885 ui.status(
4885 ui.status(
4886 _(b'remote output: %s\n') % stringutil.escapestr(output)
4886 _(b'remote output: %s\n') % stringutil.escapestr(output)
4887 )
4887 )
4888 else:
4888 else:
4889 with peer.commandexecutor() as e:
4889 with peer.commandexecutor() as e:
4890 res = e.callcommand(command, args).result()
4890 res = e.callcommand(command, args).result()
4891
4891
4892 ui.status(
4892 ui.status(
4893 _(b'response: %s\n')
4893 _(b'response: %s\n')
4894 % stringutil.pprint(res, bprefix=True, indent=2)
4894 % stringutil.pprint(res, bprefix=True, indent=2)
4895 )
4895 )
4896
4896
4897 elif action == b'batchbegin':
4897 elif action == b'batchbegin':
4898 if batchedcommands is not None:
4898 if batchedcommands is not None:
4899 raise error.Abort(_(b'nested batchbegin not allowed'))
4899 raise error.Abort(_(b'nested batchbegin not allowed'))
4900
4900
4901 batchedcommands = []
4901 batchedcommands = []
4902 elif action == b'batchsubmit':
4902 elif action == b'batchsubmit':
4903 # There is a batching API we could go through. But it would be
4903 # There is a batching API we could go through. But it would be
4904 # difficult to normalize requests into function calls. It is easier
4904 # difficult to normalize requests into function calls. It is easier
4905 # to bypass this layer and normalize to commands + args.
4905 # to bypass this layer and normalize to commands + args.
4906 ui.status(
4906 ui.status(
4907 _(b'sending batch with %d sub-commands\n')
4907 _(b'sending batch with %d sub-commands\n')
4908 % len(batchedcommands)
4908 % len(batchedcommands)
4909 )
4909 )
4910 assert peer is not None
4910 assert peer is not None
4911 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4911 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4912 ui.status(
4912 ui.status(
4913 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4913 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4914 )
4914 )
4915
4915
4916 batchedcommands = None
4916 batchedcommands = None
4917
4917
4918 elif action.startswith(b'httprequest '):
4918 elif action.startswith(b'httprequest '):
4919 if not opener:
4919 if not opener:
4920 raise error.Abort(
4920 raise error.Abort(
4921 _(b'cannot use httprequest without an HTTP peer')
4921 _(b'cannot use httprequest without an HTTP peer')
4922 )
4922 )
4923
4923
4924 request = action.split(b' ', 2)
4924 request = action.split(b' ', 2)
4925 if len(request) != 3:
4925 if len(request) != 3:
4926 raise error.Abort(
4926 raise error.Abort(
4927 _(
4927 _(
4928 b'invalid httprequest: expected format is '
4928 b'invalid httprequest: expected format is '
4929 b'"httprequest <method> <path>'
4929 b'"httprequest <method> <path>'
4930 )
4930 )
4931 )
4931 )
4932
4932
4933 method, httppath = request[1:]
4933 method, httppath = request[1:]
4934 headers = {}
4934 headers = {}
4935 body = None
4935 body = None
4936 frames = []
4936 frames = []
4937 for line in lines:
4937 for line in lines:
4938 line = line.lstrip()
4938 line = line.lstrip()
4939 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4939 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4940 if m:
4940 if m:
4941 # Headers need to use native strings.
4941 # Headers need to use native strings.
4942 key = pycompat.strurl(m.group(1))
4942 key = pycompat.strurl(m.group(1))
4943 value = pycompat.strurl(m.group(2))
4943 value = pycompat.strurl(m.group(2))
4944 headers[key] = value
4944 headers[key] = value
4945 continue
4945 continue
4946
4946
4947 if line.startswith(b'BODYFILE '):
4947 if line.startswith(b'BODYFILE '):
4948 with open(line.split(b' ', 1), b'rb') as fh:
4948 with open(line.split(b' ', 1), b'rb') as fh:
4949 body = fh.read()
4949 body = fh.read()
4950 elif line.startswith(b'frame '):
4950 elif line.startswith(b'frame '):
4951 frame = wireprotoframing.makeframefromhumanstring(
4951 frame = wireprotoframing.makeframefromhumanstring(
4952 line[len(b'frame ') :]
4952 line[len(b'frame ') :]
4953 )
4953 )
4954
4954
4955 frames.append(frame)
4955 frames.append(frame)
4956 else:
4956 else:
4957 raise error.Abort(
4957 raise error.Abort(
4958 _(b'unknown argument to httprequest: %s') % line
4958 _(b'unknown argument to httprequest: %s') % line
4959 )
4959 )
4960
4960
4961 url = path + httppath
4961 url = path + httppath
4962
4962
4963 if frames:
4963 if frames:
4964 body = b''.join(bytes(f) for f in frames)
4964 body = b''.join(bytes(f) for f in frames)
4965
4965
4966 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4966 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4967
4967
4968 # urllib.Request insists on using has_data() as a proxy for
4968 # urllib.Request insists on using has_data() as a proxy for
4969 # determining the request method. Override that to use our
4969 # determining the request method. Override that to use our
4970 # explicitly requested method.
4970 # explicitly requested method.
4971 req.get_method = lambda: pycompat.sysstr(method)
4971 req.get_method = lambda: pycompat.sysstr(method)
4972
4972
4973 try:
4973 try:
4974 res = opener.open(req)
4974 res = opener.open(req)
4975 body = res.read()
4975 body = res.read()
4976 except util.urlerr.urlerror as e:
4976 except util.urlerr.urlerror as e:
4977 # read() method must be called, but only exists in Python 2
4977 # read() method must be called, but only exists in Python 2
4978 getattr(e, 'read', lambda: None)()
4978 getattr(e, 'read', lambda: None)()
4979 continue
4979 continue
4980
4980
4981 ct = res.headers.get('Content-Type')
4981 ct = res.headers.get('Content-Type')
4982 if ct == 'application/mercurial-cbor':
4982 if ct == 'application/mercurial-cbor':
4983 ui.write(
4983 ui.write(
4984 _(b'cbor> %s\n')
4984 _(b'cbor> %s\n')
4985 % stringutil.pprint(
4985 % stringutil.pprint(
4986 cborutil.decodeall(body), bprefix=True, indent=2
4986 cborutil.decodeall(body), bprefix=True, indent=2
4987 )
4987 )
4988 )
4988 )
4989
4989
4990 elif action == b'close':
4990 elif action == b'close':
4991 assert peer is not None
4991 assert peer is not None
4992 peer.close()
4992 peer.close()
4993 elif action == b'readavailable':
4993 elif action == b'readavailable':
4994 if not stdout or not stderr:
4994 if not stdout or not stderr:
4995 raise error.Abort(
4995 raise error.Abort(
4996 _(b'readavailable not available on this peer')
4996 _(b'readavailable not available on this peer')
4997 )
4997 )
4998
4998
4999 stdin.close()
4999 stdin.close()
5000 stdout.read()
5000 stdout.read()
5001 stderr.read()
5001 stderr.read()
5002
5002
5003 elif action == b'readline':
5003 elif action == b'readline':
5004 if not stdout:
5004 if not stdout:
5005 raise error.Abort(_(b'readline not available on this peer'))
5005 raise error.Abort(_(b'readline not available on this peer'))
5006 stdout.readline()
5006 stdout.readline()
5007 elif action == b'ereadline':
5007 elif action == b'ereadline':
5008 if not stderr:
5008 if not stderr:
5009 raise error.Abort(_(b'ereadline not available on this peer'))
5009 raise error.Abort(_(b'ereadline not available on this peer'))
5010 stderr.readline()
5010 stderr.readline()
5011 elif action.startswith(b'read '):
5011 elif action.startswith(b'read '):
5012 count = int(action.split(b' ', 1)[1])
5012 count = int(action.split(b' ', 1)[1])
5013 if not stdout:
5013 if not stdout:
5014 raise error.Abort(_(b'read not available on this peer'))
5014 raise error.Abort(_(b'read not available on this peer'))
5015 stdout.read(count)
5015 stdout.read(count)
5016 elif action.startswith(b'eread '):
5016 elif action.startswith(b'eread '):
5017 count = int(action.split(b' ', 1)[1])
5017 count = int(action.split(b' ', 1)[1])
5018 if not stderr:
5018 if not stderr:
5019 raise error.Abort(_(b'eread not available on this peer'))
5019 raise error.Abort(_(b'eread not available on this peer'))
5020 stderr.read(count)
5020 stderr.read(count)
5021 else:
5021 else:
5022 raise error.Abort(_(b'unknown action: %s') % action)
5022 raise error.Abort(_(b'unknown action: %s') % action)
5023
5023
5024 if batchedcommands is not None:
5024 if batchedcommands is not None:
5025 raise error.Abort(_(b'unclosed "batchbegin" request'))
5025 raise error.Abort(_(b'unclosed "batchbegin" request'))
5026
5026
5027 if peer:
5027 if peer:
5028 peer.close()
5028 peer.close()
5029
5029
5030 if proc:
5030 if proc:
5031 proc.kill()
5031 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now