##// END OF EJS Templates
debugindex: move the logic into its own module...
marmoute -
r50145:61cf3d39 default
parent child Browse files
Show More
@@ -0,0 +1,51
1 # revlogutils/debug.py - utility used for revlog debugging
2 #
3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2022 Octobus <contact@octobus.net>
5 #
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
8
9 from .. import (
10 node as nodemod,
11 )
12
13
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog

    Writes one row per revision (rev, linkrev, nodeid, p1, p2) through the
    given formatter.  With ``full_node`` true, node ids are shown unabridged,
    otherwise in short form.
    """
    # pick the node rendering once, outside the per-revision loop
    hexfn = nodemod.hex if full_node else nodemod.short

    # width of one rendered node id; probe the first revision if any,
    # otherwise fall back to the short-hash width of 12
    idlen = 12
    for probe_rev in revlog:
        idlen = len(hexfn(revlog.node(probe_rev)))
        break

    fm = formatter

    # header: b'   rev' is padded so it right-aligns under the %6d rev column
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in revlog:
        node = revlog.node(rev)
        parents = revlog.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', revlog.linkrev(rev))
        fm.write(b'node', b'%s ', hexfn(node))
        fm.write(b'p1', b'%s ', hexfn(parents[0]))
        fm.write(b'p2', b'%s', hexfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
@@ -1,5051 +1,5032
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revlogutils,
76 revlogutils,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 constants as revlog_constants,
107 constants as revlog_constants,
108 debug as revlog_debug,
108 deltas as deltautil,
109 deltas as deltautil,
109 nodemap,
110 nodemap,
110 rewrite,
111 rewrite,
111 sidedata,
112 sidedata,
112 )
113 )
113
114
# convenience alias used throughout this module
release = lockmod.release

# command table: seeded with the strip extension's commands, then extended
# by every @command registration below
table = {}
table.update(strip.command._table)
command = registrar.command(table)
119
120
120
121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # explicit index file: open it directly, no repository required
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        # no index given: fall back to the current repository's changelog
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
140
141
141
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout this module (cf. repo.vfs.write with
    # b"localtags" in debugbuilddag); a str component cannot be joined onto
    # the bytes vfs base on Python 3, so use a bytes literal here.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
157
158
158
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle (local path or URL), identify it, and apply it in place
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
165
166
166
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
352
353
353
354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the deltas of a changegroup, optionally with full delta details

    With ``all`` set, every chunk group (changelog, manifest, each filelog)
    is listed with id/parents/cset/delta-base/delta-length; otherwise only
    changelog node ids are printed.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections repeat until an empty header dict is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393
394
394
395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown format: report the version byte and size, nothing else
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
417
418
418
419
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    (the previous docstring, copy-pasted from _debugobsmarkers, wrongly
    claimed this displayed obsolescence markers)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
427
428
428
429
def _quasirepr(thing):
    """return a stable, order-independent bytes repr of ``thing``

    Mappings are rendered with sorted keys so output does not depend on
    insertion order; anything else falls back to Python's repr().
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        body = b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % body
    return pycompat.bytestr(repr(thing))
435
436
436
437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional --part-type filter; empty list means "show everything"
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a detailed dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
459
460
460
461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec short-circuits: print the bundlespec and stop
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
483
484
484
485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even if capability queries fail
        peer.close()
504
505
505
506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the changeset contents
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed block from changelog sidedata, if present
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; order matters, first match wins
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
555
556
556
557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # dirstate.verify yields (format, args...) tuples for each inconsistency
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
570
571
571
572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to the style dump or the color dump
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584
585
585
586
def _debugdisplaycolor(ui):
    """print every color/effect name, each rendered in its own style"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
602
603
603
604
604 def _debugdisplaystyle(ui):
605 def _debugdisplaystyle(ui):
605 ui.write(_(b'available style:\n'))
606 ui.write(_(b'available style:\n'))
606 if not ui._styles:
607 if not ui._styles:
607 return
608 return
608 width = max(len(s) for s in ui._styles)
609 width = max(len(s) for s in ui._styles)
609 for label, effects in sorted(ui._styles.items()):
610 for label, effects in sorted(ui._styles.items()):
610 ui.write(b'%s' % label, label=label)
611 ui.write(b'%s' % label, label=label)
611 if effects:
612 if effects:
612 # 50
613 # 50
613 ui.write(b': ')
614 ui.write(b': ')
614 ui.write(b' ' * (max(0, width - len(label))))
615 ui.write(b' ' * (max(0, width - len(label))))
615 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
616 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
616 ui.write(b'\n')
617 ui.write(b'\n')
617
618
618
619
619 @command(b'debugcreatestreamclonebundle', [], b'FILE')
620 @command(b'debugcreatestreamclonebundle', [], b'FILE')
620 def debugcreatestreamclonebundle(ui, repo, fname):
621 def debugcreatestreamclonebundle(ui, repo, fname):
621 """create a stream clone bundle file
622 """create a stream clone bundle file
622
623
623 Stream bundles are special bundles that are essentially archives of
624 Stream bundles are special bundles that are essentially archives of
624 revlog files. They are commonly used for cloning very quickly.
625 revlog files. They are commonly used for cloning very quickly.
625 """
626 """
626 # TODO we may want to turn this into an abort when this functionality
627 # TODO we may want to turn this into an abort when this functionality
627 # is moved into `hg bundle`.
628 # is moved into `hg bundle`.
628 if phases.hassecret(repo):
629 if phases.hassecret(repo):
629 ui.warn(
630 ui.warn(
630 _(
631 _(
631 b'(warning: stream clone bundle will contain secret '
632 b'(warning: stream clone bundle will contain secret '
632 b'revisions)\n'
633 b'revisions)\n'
633 )
634 )
634 )
635 )
635
636
636 requirements, gen = streamclone.generatebundlev1(repo)
637 requirements, gen = streamclone.generatebundlev1(repo)
637 changegroup.writechunks(ui, gen, fname)
638 changegroup.writechunks(ui, gen, fname)
638
639
639 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640
641
641
642
642 @command(
643 @command(
643 b'debugdag',
644 b'debugdag',
644 [
645 [
645 (b't', b'tags', None, _(b'use tags as labels')),
646 (b't', b'tags', None, _(b'use tags as labels')),
646 (b'b', b'branches', None, _(b'annotate with branch names')),
647 (b'b', b'branches', None, _(b'annotate with branch names')),
647 (b'', b'dots', None, _(b'use dots for runs')),
648 (b'', b'dots', None, _(b'use dots for runs')),
648 (b's', b'spaces', None, _(b'separate elements by spaces')),
649 (b's', b'spaces', None, _(b'separate elements by spaces')),
649 ],
650 ],
650 _(b'[OPTION]... [FILE [REV]...]'),
651 _(b'[OPTION]... [FILE [REV]...]'),
651 optionalrepo=True,
652 optionalrepo=True,
652 )
653 )
653 def debugdag(ui, repo, file_=None, *revs, **opts):
654 def debugdag(ui, repo, file_=None, *revs, **opts):
654 """format the changelog or an index DAG as a concise textual description
655 """format the changelog or an index DAG as a concise textual description
655
656
656 If you pass a revlog index, the revlog's DAG is emitted. If you list
657 If you pass a revlog index, the revlog's DAG is emitted. If you list
657 revision numbers, they get labeled in the output as rN.
658 revision numbers, they get labeled in the output as rN.
658
659
659 Otherwise, the changelog DAG of the current repo is emitted.
660 Otherwise, the changelog DAG of the current repo is emitted.
660 """
661 """
661 spaces = opts.get('spaces')
662 spaces = opts.get('spaces')
662 dots = opts.get('dots')
663 dots = opts.get('dots')
663 if file_:
664 if file_:
664 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
665 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
665 revs = {int(r) for r in revs}
666 revs = {int(r) for r in revs}
666
667
667 def events():
668 def events():
668 for r in rlog:
669 for r in rlog:
669 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
670 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
670 if r in revs:
671 if r in revs:
671 yield b'l', (r, b"r%i" % r)
672 yield b'l', (r, b"r%i" % r)
672
673
673 elif repo:
674 elif repo:
674 cl = repo.changelog
675 cl = repo.changelog
675 tags = opts.get('tags')
676 tags = opts.get('tags')
676 branches = opts.get('branches')
677 branches = opts.get('branches')
677 if tags:
678 if tags:
678 labels = {}
679 labels = {}
679 for l, n in repo.tags().items():
680 for l, n in repo.tags().items():
680 labels.setdefault(cl.rev(n), []).append(l)
681 labels.setdefault(cl.rev(n), []).append(l)
681
682
682 def events():
683 def events():
683 b = b"default"
684 b = b"default"
684 for r in cl:
685 for r in cl:
685 if branches:
686 if branches:
686 newb = cl.read(cl.node(r))[5][b'branch']
687 newb = cl.read(cl.node(r))[5][b'branch']
687 if newb != b:
688 if newb != b:
688 yield b'a', newb
689 yield b'a', newb
689 b = newb
690 b = newb
690 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
691 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
691 if tags:
692 if tags:
692 ls = labels.get(r)
693 ls = labels.get(r)
693 if ls:
694 if ls:
694 for l in ls:
695 for l in ls:
695 yield b'l', (r, l)
696 yield b'l', (r, l)
696
697
697 else:
698 else:
698 raise error.Abort(_(b'need repo for changelog dag'))
699 raise error.Abort(_(b'need repo for changelog dag'))
699
700
700 for line in dagparser.dagtextlines(
701 for line in dagparser.dagtextlines(
701 events(),
702 events(),
702 addspaces=spaces,
703 addspaces=spaces,
703 wraplabels=True,
704 wraplabels=True,
704 wrapannotations=True,
705 wrapannotations=True,
705 wrapnonlinear=dots,
706 wrapnonlinear=dots,
706 usedots=dots,
707 usedots=dots,
707 maxlinewidth=70,
708 maxlinewidth=70,
708 ):
709 ):
709 ui.write(line)
710 ui.write(line)
710 ui.write(b"\n")
711 ui.write(b"\n")
711
712
712
713
713 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
714 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
714 def debugdata(ui, repo, file_, rev=None, **opts):
715 def debugdata(ui, repo, file_, rev=None, **opts):
715 """dump the contents of a data file revision"""
716 """dump the contents of a data file revision"""
716 opts = pycompat.byteskwargs(opts)
717 opts = pycompat.byteskwargs(opts)
717 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
718 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
718 if rev is not None:
719 if rev is not None:
719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 file_, rev = None, file_
721 file_, rev = None, file_
721 elif rev is None:
722 elif rev is None:
722 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
723 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
723 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
724 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
724 try:
725 try:
725 ui.write(r.rawdata(r.lookup(rev)))
726 ui.write(r.rawdata(r.lookup(rev)))
726 except KeyError:
727 except KeyError:
727 raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 raise error.Abort(_(b'invalid revision identifier %s') % rev)
728
729
729
730
730 @command(
731 @command(
731 b'debugdate',
732 b'debugdate',
732 [(b'e', b'extended', None, _(b'try extended date formats'))],
733 [(b'e', b'extended', None, _(b'try extended date formats'))],
733 _(b'[-e] DATE [RANGE]'),
734 _(b'[-e] DATE [RANGE]'),
734 norepo=True,
735 norepo=True,
735 optionalrepo=True,
736 optionalrepo=True,
736 )
737 )
737 def debugdate(ui, date, range=None, **opts):
738 def debugdate(ui, date, range=None, **opts):
738 """parse and display a date"""
739 """parse and display a date"""
739 if opts["extended"]:
740 if opts["extended"]:
740 d = dateutil.parsedate(date, dateutil.extendeddateformats)
741 d = dateutil.parsedate(date, dateutil.extendeddateformats)
741 else:
742 else:
742 d = dateutil.parsedate(date)
743 d = dateutil.parsedate(date)
743 ui.writenoi18n(b"internal: %d %d\n" % d)
744 ui.writenoi18n(b"internal: %d %d\n" % d)
744 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
745 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
745 if range:
746 if range:
746 m = dateutil.matchdate(range)
747 m = dateutil.matchdate(range)
747 ui.writenoi18n(b"match: %s\n" % m(d[0]))
748 ui.writenoi18n(b"match: %s\n" % m(d[0]))
748
749
749
750
750 @command(
751 @command(
751 b'debugdeltachain',
752 b'debugdeltachain',
752 cmdutil.debugrevlogopts + cmdutil.formatteropts,
753 cmdutil.debugrevlogopts + cmdutil.formatteropts,
753 _(b'-c|-m|FILE'),
754 _(b'-c|-m|FILE'),
754 optionalrepo=True,
755 optionalrepo=True,
755 )
756 )
756 def debugdeltachain(ui, repo, file_=None, **opts):
757 def debugdeltachain(ui, repo, file_=None, **opts):
757 """dump information about delta chains in a revlog
758 """dump information about delta chains in a revlog
758
759
759 Output can be templatized. Available template keywords are:
760 Output can be templatized. Available template keywords are:
760
761
761 :``rev``: revision number
762 :``rev``: revision number
762 :``p1``: parent 1 revision number (for reference)
763 :``p1``: parent 1 revision number (for reference)
763 :``p2``: parent 2 revision number (for reference)
764 :``p2``: parent 2 revision number (for reference)
764 :``chainid``: delta chain identifier (numbered by unique base)
765 :``chainid``: delta chain identifier (numbered by unique base)
765 :``chainlen``: delta chain length to this revision
766 :``chainlen``: delta chain length to this revision
766 :``prevrev``: previous revision in delta chain
767 :``prevrev``: previous revision in delta chain
767 :``deltatype``: role of delta / how it was computed
768 :``deltatype``: role of delta / how it was computed
768 - base: a full snapshot
769 - base: a full snapshot
769 - snap: an intermediate snapshot
770 - snap: an intermediate snapshot
770 - p1: a delta against the first parent
771 - p1: a delta against the first parent
771 - p2: a delta against the second parent
772 - p2: a delta against the second parent
772 - skip1: a delta against the same base as p1
773 - skip1: a delta against the same base as p1
773 (when p1 has empty delta
774 (when p1 has empty delta
774 - skip2: a delta against the same base as p2
775 - skip2: a delta against the same base as p2
775 (when p2 has empty delta
776 (when p2 has empty delta
776 - prev: a delta against the previous revision
777 - prev: a delta against the previous revision
777 - other: a delta against an arbitrary revision
778 - other: a delta against an arbitrary revision
778 :``compsize``: compressed size of revision
779 :``compsize``: compressed size of revision
779 :``uncompsize``: uncompressed size of revision
780 :``uncompsize``: uncompressed size of revision
780 :``chainsize``: total size of compressed revisions in chain
781 :``chainsize``: total size of compressed revisions in chain
781 :``chainratio``: total chain size divided by uncompressed revision size
782 :``chainratio``: total chain size divided by uncompressed revision size
782 (new delta chains typically start at ratio 2.00)
783 (new delta chains typically start at ratio 2.00)
783 :``lindist``: linear distance from base revision in delta chain to end
784 :``lindist``: linear distance from base revision in delta chain to end
784 of this revision
785 of this revision
785 :``extradist``: total size of revisions not part of this delta chain from
786 :``extradist``: total size of revisions not part of this delta chain from
786 base of delta chain to end of this revision; a measurement
787 base of delta chain to end of this revision; a measurement
787 of how much extra data we need to read/seek across to read
788 of how much extra data we need to read/seek across to read
788 the delta chain for this revision
789 the delta chain for this revision
789 :``extraratio``: extradist divided by chainsize; another representation of
790 :``extraratio``: extradist divided by chainsize; another representation of
790 how much unrelated data is needed to load this delta chain
791 how much unrelated data is needed to load this delta chain
791
792
792 If the repository is configured to use the sparse read, additional keywords
793 If the repository is configured to use the sparse read, additional keywords
793 are available:
794 are available:
794
795
795 :``readsize``: total size of data read from the disk for a revision
796 :``readsize``: total size of data read from the disk for a revision
796 (sum of the sizes of all the blocks)
797 (sum of the sizes of all the blocks)
797 :``largestblock``: size of the largest block of data read from the disk
798 :``largestblock``: size of the largest block of data read from the disk
798 :``readdensity``: density of useful bytes in the data read from the disk
799 :``readdensity``: density of useful bytes in the data read from the disk
799 :``srchunks``: in how many data hunks the whole revision would be read
800 :``srchunks``: in how many data hunks the whole revision would be read
800
801
801 The sparse read can be enabled with experimental.sparse-read = True
802 The sparse read can be enabled with experimental.sparse-read = True
802 """
803 """
803 opts = pycompat.byteskwargs(opts)
804 opts = pycompat.byteskwargs(opts)
804 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
805 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
805 index = r.index
806 index = r.index
806 start = r.start
807 start = r.start
807 length = r.length
808 length = r.length
808 generaldelta = r._generaldelta
809 generaldelta = r._generaldelta
809 withsparseread = getattr(r, '_withsparseread', False)
810 withsparseread = getattr(r, '_withsparseread', False)
810
811
811 # security to avoid crash on corrupted revlogs
812 # security to avoid crash on corrupted revlogs
812 total_revs = len(index)
813 total_revs = len(index)
813
814
814 def revinfo(rev):
815 def revinfo(rev):
815 e = index[rev]
816 e = index[rev]
816 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
817 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
817 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
818 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
818 chainsize = 0
819 chainsize = 0
819
820
820 base = e[revlog_constants.ENTRY_DELTA_BASE]
821 base = e[revlog_constants.ENTRY_DELTA_BASE]
821 p1 = e[revlog_constants.ENTRY_PARENT_1]
822 p1 = e[revlog_constants.ENTRY_PARENT_1]
822 p2 = e[revlog_constants.ENTRY_PARENT_2]
823 p2 = e[revlog_constants.ENTRY_PARENT_2]
823
824
824 # If the parents of a revision has an empty delta, we never try to delta
825 # If the parents of a revision has an empty delta, we never try to delta
825 # against that parent, but directly against the delta base of that
826 # against that parent, but directly against the delta base of that
826 # parent (recursively). It avoids adding a useless entry in the chain.
827 # parent (recursively). It avoids adding a useless entry in the chain.
827 #
828 #
828 # However we need to detect that as a special case for delta-type, that
829 # However we need to detect that as a special case for delta-type, that
829 # is not simply "other".
830 # is not simply "other".
830 p1_base = p1
831 p1_base = p1
831 if p1 != nullrev and p1 < total_revs:
832 if p1 != nullrev and p1 < total_revs:
832 e1 = index[p1]
833 e1 = index[p1]
833 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
834 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
834 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
835 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
835 if (
836 if (
836 new_base == p1_base
837 new_base == p1_base
837 or new_base == nullrev
838 or new_base == nullrev
838 or new_base >= total_revs
839 or new_base >= total_revs
839 ):
840 ):
840 break
841 break
841 p1_base = new_base
842 p1_base = new_base
842 e1 = index[p1_base]
843 e1 = index[p1_base]
843 p2_base = p2
844 p2_base = p2
844 if p2 != nullrev and p2 < total_revs:
845 if p2 != nullrev and p2 < total_revs:
845 e2 = index[p2]
846 e2 = index[p2]
846 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
847 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
847 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
848 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
848 if (
849 if (
849 new_base == p2_base
850 new_base == p2_base
850 or new_base == nullrev
851 or new_base == nullrev
851 or new_base >= total_revs
852 or new_base >= total_revs
852 ):
853 ):
853 break
854 break
854 p2_base = new_base
855 p2_base = new_base
855 e2 = index[p2_base]
856 e2 = index[p2_base]
856
857
857 if generaldelta:
858 if generaldelta:
858 if base == p1:
859 if base == p1:
859 deltatype = b'p1'
860 deltatype = b'p1'
860 elif base == p2:
861 elif base == p2:
861 deltatype = b'p2'
862 deltatype = b'p2'
862 elif base == rev:
863 elif base == rev:
863 deltatype = b'base'
864 deltatype = b'base'
864 elif base == p1_base:
865 elif base == p1_base:
865 deltatype = b'skip1'
866 deltatype = b'skip1'
866 elif base == p2_base:
867 elif base == p2_base:
867 deltatype = b'skip2'
868 deltatype = b'skip2'
868 elif r.issnapshot(rev):
869 elif r.issnapshot(rev):
869 deltatype = b'snap'
870 deltatype = b'snap'
870 elif base == rev - 1:
871 elif base == rev - 1:
871 deltatype = b'prev'
872 deltatype = b'prev'
872 else:
873 else:
873 deltatype = b'other'
874 deltatype = b'other'
874 else:
875 else:
875 if base == rev:
876 if base == rev:
876 deltatype = b'base'
877 deltatype = b'base'
877 else:
878 else:
878 deltatype = b'prev'
879 deltatype = b'prev'
879
880
880 chain = r._deltachain(rev)[0]
881 chain = r._deltachain(rev)[0]
881 for iterrev in chain:
882 for iterrev in chain:
882 e = index[iterrev]
883 e = index[iterrev]
883 chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
884 chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
884
885
885 return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
886 return p1, p2, compsize, uncompsize, deltatype, chain, chainsize
886
887
887 fm = ui.formatter(b'debugdeltachain', opts)
888 fm = ui.formatter(b'debugdeltachain', opts)
888
889
889 fm.plain(
890 fm.plain(
890 b' rev p1 p2 chain# chainlen prev delta '
891 b' rev p1 p2 chain# chainlen prev delta '
891 b'size rawsize chainsize ratio lindist extradist '
892 b'size rawsize chainsize ratio lindist extradist '
892 b'extraratio'
893 b'extraratio'
893 )
894 )
894 if withsparseread:
895 if withsparseread:
895 fm.plain(b' readsize largestblk rddensity srchunks')
896 fm.plain(b' readsize largestblk rddensity srchunks')
896 fm.plain(b'\n')
897 fm.plain(b'\n')
897
898
898 chainbases = {}
899 chainbases = {}
899 for rev in r:
900 for rev in r:
900 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
901 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
901 chainbase = chain[0]
902 chainbase = chain[0]
902 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
903 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
903 basestart = start(chainbase)
904 basestart = start(chainbase)
904 revstart = start(rev)
905 revstart = start(rev)
905 lineardist = revstart + comp - basestart
906 lineardist = revstart + comp - basestart
906 extradist = lineardist - chainsize
907 extradist = lineardist - chainsize
907 try:
908 try:
908 prevrev = chain[-2]
909 prevrev = chain[-2]
909 except IndexError:
910 except IndexError:
910 prevrev = -1
911 prevrev = -1
911
912
912 if uncomp != 0:
913 if uncomp != 0:
913 chainratio = float(chainsize) / float(uncomp)
914 chainratio = float(chainsize) / float(uncomp)
914 else:
915 else:
915 chainratio = chainsize
916 chainratio = chainsize
916
917
917 if chainsize != 0:
918 if chainsize != 0:
918 extraratio = float(extradist) / float(chainsize)
919 extraratio = float(extradist) / float(chainsize)
919 else:
920 else:
920 extraratio = extradist
921 extraratio = extradist
921
922
922 fm.startitem()
923 fm.startitem()
923 fm.write(
924 fm.write(
924 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
925 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
925 b'uncompsize chainsize chainratio lindist extradist '
926 b'uncompsize chainsize chainratio lindist extradist '
926 b'extraratio',
927 b'extraratio',
927 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
928 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
928 rev,
929 rev,
929 p1,
930 p1,
930 p2,
931 p2,
931 chainid,
932 chainid,
932 len(chain),
933 len(chain),
933 prevrev,
934 prevrev,
934 deltatype,
935 deltatype,
935 comp,
936 comp,
936 uncomp,
937 uncomp,
937 chainsize,
938 chainsize,
938 chainratio,
939 chainratio,
939 lineardist,
940 lineardist,
940 extradist,
941 extradist,
941 extraratio,
942 extraratio,
942 rev=rev,
943 rev=rev,
943 chainid=chainid,
944 chainid=chainid,
944 chainlen=len(chain),
945 chainlen=len(chain),
945 prevrev=prevrev,
946 prevrev=prevrev,
946 deltatype=deltatype,
947 deltatype=deltatype,
947 compsize=comp,
948 compsize=comp,
948 uncompsize=uncomp,
949 uncompsize=uncomp,
949 chainsize=chainsize,
950 chainsize=chainsize,
950 chainratio=chainratio,
951 chainratio=chainratio,
951 lindist=lineardist,
952 lindist=lineardist,
952 extradist=extradist,
953 extradist=extradist,
953 extraratio=extraratio,
954 extraratio=extraratio,
954 )
955 )
955 if withsparseread:
956 if withsparseread:
956 readsize = 0
957 readsize = 0
957 largestblock = 0
958 largestblock = 0
958 srchunks = 0
959 srchunks = 0
959
960
960 for revschunk in deltautil.slicechunk(r, chain):
961 for revschunk in deltautil.slicechunk(r, chain):
961 srchunks += 1
962 srchunks += 1
962 blkend = start(revschunk[-1]) + length(revschunk[-1])
963 blkend = start(revschunk[-1]) + length(revschunk[-1])
963 blksize = blkend - start(revschunk[0])
964 blksize = blkend - start(revschunk[0])
964
965
965 readsize += blksize
966 readsize += blksize
966 if largestblock < blksize:
967 if largestblock < blksize:
967 largestblock = blksize
968 largestblock = blksize
968
969
969 if readsize:
970 if readsize:
970 readdensity = float(chainsize) / float(readsize)
971 readdensity = float(chainsize) / float(readsize)
971 else:
972 else:
972 readdensity = 1
973 readdensity = 1
973
974
974 fm.write(
975 fm.write(
975 b'readsize largestblock readdensity srchunks',
976 b'readsize largestblock readdensity srchunks',
976 b' %10d %10d %9.5f %8d',
977 b' %10d %10d %9.5f %8d',
977 readsize,
978 readsize,
978 largestblock,
979 largestblock,
979 readdensity,
980 readdensity,
980 srchunks,
981 srchunks,
981 readsize=readsize,
982 readsize=readsize,
982 largestblock=largestblock,
983 largestblock=largestblock,
983 readdensity=readdensity,
984 readdensity=readdensity,
984 srchunks=srchunks,
985 srchunks=srchunks,
985 )
986 )
986
987
987 fm.plain(b'\n')
988 fm.plain(b'\n')
988
989
989 fm.end()
990 fm.end()
990
991
991
992
992 @command(
993 @command(
993 b'debug-delta-find',
994 b'debug-delta-find',
994 cmdutil.debugrevlogopts + cmdutil.formatteropts,
995 cmdutil.debugrevlogopts + cmdutil.formatteropts,
995 _(b'-c|-m|FILE REV'),
996 _(b'-c|-m|FILE REV'),
996 optionalrepo=True,
997 optionalrepo=True,
997 )
998 )
998 def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
999 def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
999 """display the computation to get to a valid delta for storing REV
1000 """display the computation to get to a valid delta for storing REV
1000
1001
1001 This command will replay the process used to find the "best" delta to store
1002 This command will replay the process used to find the "best" delta to store
1002 a revision and display information about all the steps used to get to that
1003 a revision and display information about all the steps used to get to that
1003 result.
1004 result.
1004
1005
1005 The revision use the revision number of the target storage (not changelog
1006 The revision use the revision number of the target storage (not changelog
1006 revision number).
1007 revision number).
1007
1008
1008 note: the process is initiated from a full text of the revision to store.
1009 note: the process is initiated from a full text of the revision to store.
1009 """
1010 """
1010 opts = pycompat.byteskwargs(opts)
1011 opts = pycompat.byteskwargs(opts)
1011 if arg_2 is None:
1012 if arg_2 is None:
1012 file_ = None
1013 file_ = None
1013 rev = arg_1
1014 rev = arg_1
1014 else:
1015 else:
1015 file_ = arg_1
1016 file_ = arg_1
1016 rev = arg_2
1017 rev = arg_2
1017
1018
1018 rev = int(rev)
1019 rev = int(rev)
1019
1020
1020 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1021 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1021
1022
1022 deltacomputer = deltautil.deltacomputer(
1023 deltacomputer = deltautil.deltacomputer(
1023 revlog,
1024 revlog,
1024 write_debug=ui.write,
1025 write_debug=ui.write,
1025 debug_search=True,
1026 debug_search=True,
1026 )
1027 )
1027
1028
1028 node = revlog.node(rev)
1029 node = revlog.node(rev)
1029 p1r, p2r = revlog.parentrevs(rev)
1030 p1r, p2r = revlog.parentrevs(rev)
1030 p1 = revlog.node(p1r)
1031 p1 = revlog.node(p1r)
1031 p2 = revlog.node(p2r)
1032 p2 = revlog.node(p2r)
1032 btext = [revlog.revision(rev)]
1033 btext = [revlog.revision(rev)]
1033 textlen = len(btext[0])
1034 textlen = len(btext[0])
1034 cachedelta = None
1035 cachedelta = None
1035 flags = revlog.flags(rev)
1036 flags = revlog.flags(rev)
1036
1037
1037 revinfo = revlogutils.revisioninfo(
1038 revinfo = revlogutils.revisioninfo(
1038 node,
1039 node,
1039 p1,
1040 p1,
1040 p2,
1041 p2,
1041 btext,
1042 btext,
1042 textlen,
1043 textlen,
1043 cachedelta,
1044 cachedelta,
1044 flags,
1045 flags,
1045 )
1046 )
1046
1047
1047 fh = revlog._datafp()
1048 fh = revlog._datafp()
1048 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1049 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1049
1050
1050
1051
1051 @command(
1052 @command(
1052 b'debugdirstate|debugstate',
1053 b'debugdirstate|debugstate',
1053 [
1054 [
1054 (
1055 (
1055 b'',
1056 b'',
1056 b'nodates',
1057 b'nodates',
1057 None,
1058 None,
1058 _(b'do not display the saved mtime (DEPRECATED)'),
1059 _(b'do not display the saved mtime (DEPRECATED)'),
1059 ),
1060 ),
1060 (b'', b'dates', True, _(b'display the saved mtime')),
1061 (b'', b'dates', True, _(b'display the saved mtime')),
1061 (b'', b'datesort', None, _(b'sort by saved mtime')),
1062 (b'', b'datesort', None, _(b'sort by saved mtime')),
1062 (
1063 (
1063 b'',
1064 b'',
1064 b'docket',
1065 b'docket',
1065 False,
1066 False,
1066 _(b'display the docket (metadata file) instead'),
1067 _(b'display the docket (metadata file) instead'),
1067 ),
1068 ),
1068 (
1069 (
1069 b'',
1070 b'',
1070 b'all',
1071 b'all',
1071 False,
1072 False,
1072 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1073 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1073 ),
1074 ),
1074 ],
1075 ],
1075 _(b'[OPTION]...'),
1076 _(b'[OPTION]...'),
1076 )
1077 )
1077 def debugstate(ui, repo, **opts):
1078 def debugstate(ui, repo, **opts):
1078 """show the contents of the current dirstate"""
1079 """show the contents of the current dirstate"""
1079
1080
1080 if opts.get("docket"):
1081 if opts.get("docket"):
1081 if not repo.dirstate._use_dirstate_v2:
1082 if not repo.dirstate._use_dirstate_v2:
1082 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1083 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1083
1084
1084 docket = repo.dirstate._map.docket
1085 docket = repo.dirstate._map.docket
1085 (
1086 (
1086 start_offset,
1087 start_offset,
1087 root_nodes,
1088 root_nodes,
1088 nodes_with_entry,
1089 nodes_with_entry,
1089 nodes_with_copy,
1090 nodes_with_copy,
1090 unused_bytes,
1091 unused_bytes,
1091 _unused,
1092 _unused,
1092 ignore_pattern,
1093 ignore_pattern,
1093 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1094 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1094
1095
1095 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1096 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1096 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1097 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1097 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1098 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1098 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1099 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1099 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1100 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1100 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1101 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1101 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1102 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1102 ui.write(
1103 ui.write(
1103 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1104 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1104 )
1105 )
1105 return
1106 return
1106
1107
1107 nodates = not opts['dates']
1108 nodates = not opts['dates']
1108 if opts.get('nodates') is not None:
1109 if opts.get('nodates') is not None:
1109 nodates = True
1110 nodates = True
1110 datesort = opts.get('datesort')
1111 datesort = opts.get('datesort')
1111
1112
1112 if datesort:
1113 if datesort:
1113
1114
1114 def keyfunc(entry):
1115 def keyfunc(entry):
1115 filename, _state, _mode, _size, mtime = entry
1116 filename, _state, _mode, _size, mtime = entry
1116 return (mtime, filename)
1117 return (mtime, filename)
1117
1118
1118 else:
1119 else:
1119 keyfunc = None # sort by filename
1120 keyfunc = None # sort by filename
1120 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1121 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1121 entries.sort(key=keyfunc)
1122 entries.sort(key=keyfunc)
1122 for entry in entries:
1123 for entry in entries:
1123 filename, state, mode, size, mtime = entry
1124 filename, state, mode, size, mtime = entry
1124 if mtime == -1:
1125 if mtime == -1:
1125 timestr = b'unset '
1126 timestr = b'unset '
1126 elif nodates:
1127 elif nodates:
1127 timestr = b'set '
1128 timestr = b'set '
1128 else:
1129 else:
1129 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1130 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1130 timestr = encoding.strtolocal(timestr)
1131 timestr = encoding.strtolocal(timestr)
1131 if mode & 0o20000:
1132 if mode & 0o20000:
1132 mode = b'lnk'
1133 mode = b'lnk'
1133 else:
1134 else:
1134 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1135 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1135 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1136 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1136 for f in repo.dirstate.copies():
1137 for f in repo.dirstate.copies():
1137 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1138 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1138
1139
1139
1140
1140 @command(
1141 @command(
1141 b'debugdirstateignorepatternshash',
1142 b'debugdirstateignorepatternshash',
1142 [],
1143 [],
1143 _(b''),
1144 _(b''),
1144 )
1145 )
1145 def debugdirstateignorepatternshash(ui, repo, **opts):
1146 def debugdirstateignorepatternshash(ui, repo, **opts):
1146 """show the hash of ignore patterns stored in dirstate if v2,
1147 """show the hash of ignore patterns stored in dirstate if v2,
1147 or nothing for dirstate-v2
1148 or nothing for dirstate-v2
1148 """
1149 """
1149 if repo.dirstate._use_dirstate_v2:
1150 if repo.dirstate._use_dirstate_v2:
1150 docket = repo.dirstate._map.docket
1151 docket = repo.dirstate._map.docket
1151 hash_len = 20 # 160 bits for SHA-1
1152 hash_len = 20 # 160 bits for SHA-1
1152 hash_bytes = docket.tree_metadata[-hash_len:]
1153 hash_bytes = docket.tree_metadata[-hash_len:]
1153 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1154 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1154
1155
1155
1156
1156 @command(
1157 @command(
1157 b'debugdiscovery',
1158 b'debugdiscovery',
1158 [
1159 [
1159 (b'', b'old', None, _(b'use old-style discovery')),
1160 (b'', b'old', None, _(b'use old-style discovery')),
1160 (
1161 (
1161 b'',
1162 b'',
1162 b'nonheads',
1163 b'nonheads',
1163 None,
1164 None,
1164 _(b'use old-style discovery with non-heads included'),
1165 _(b'use old-style discovery with non-heads included'),
1165 ),
1166 ),
1166 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1167 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1167 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1168 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1168 (
1169 (
1169 b'',
1170 b'',
1170 b'local-as-revs',
1171 b'local-as-revs',
1171 b"",
1172 b"",
1172 b'treat local has having these revisions only',
1173 b'treat local has having these revisions only',
1173 ),
1174 ),
1174 (
1175 (
1175 b'',
1176 b'',
1176 b'remote-as-revs',
1177 b'remote-as-revs',
1177 b"",
1178 b"",
1178 b'use local as remote, with only these revisions',
1179 b'use local as remote, with only these revisions',
1179 ),
1180 ),
1180 ]
1181 ]
1181 + cmdutil.remoteopts
1182 + cmdutil.remoteopts
1182 + cmdutil.formatteropts,
1183 + cmdutil.formatteropts,
1183 _(b'[--rev REV] [OTHER]'),
1184 _(b'[--rev REV] [OTHER]'),
1184 )
1185 )
1185 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1186 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1186 """runs the changeset discovery protocol in isolation
1187 """runs the changeset discovery protocol in isolation
1187
1188
1188 The local peer can be "replaced" by a subset of the local repository by
1189 The local peer can be "replaced" by a subset of the local repository by
1189 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1190 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1190 be "replaced" by a subset of the local repository using the
1191 be "replaced" by a subset of the local repository using the
1191 `--local-as-revs` flag. This is useful to efficiently debug pathological
1192 `--local-as-revs` flag. This is useful to efficiently debug pathological
1192 discovery situation.
1193 discovery situation.
1193
1194
1194 The following developer oriented config are relevant for people playing with this command:
1195 The following developer oriented config are relevant for people playing with this command:
1195
1196
1196 * devel.discovery.exchange-heads=True
1197 * devel.discovery.exchange-heads=True
1197
1198
1198 If False, the discovery will not start with
1199 If False, the discovery will not start with
1199 remote head fetching and local head querying.
1200 remote head fetching and local head querying.
1200
1201
1201 * devel.discovery.grow-sample=True
1202 * devel.discovery.grow-sample=True
1202
1203
1203 If False, the sample size used in set discovery will not be increased
1204 If False, the sample size used in set discovery will not be increased
1204 through the process
1205 through the process
1205
1206
1206 * devel.discovery.grow-sample.dynamic=True
1207 * devel.discovery.grow-sample.dynamic=True
1207
1208
1208 When discovery.grow-sample.dynamic is True, the default, the sample size is
1209 When discovery.grow-sample.dynamic is True, the default, the sample size is
1209 adapted to the shape of the undecided set (it is set to the max of:
1210 adapted to the shape of the undecided set (it is set to the max of:
1210 <target-size>, len(roots(undecided)), len(heads(undecided)
1211 <target-size>, len(roots(undecided)), len(heads(undecided)
1211
1212
1212 * devel.discovery.grow-sample.rate=1.05
1213 * devel.discovery.grow-sample.rate=1.05
1213
1214
1214 the rate at which the sample grow
1215 the rate at which the sample grow
1215
1216
1216 * devel.discovery.randomize=True
1217 * devel.discovery.randomize=True
1217
1218
1218 If andom sampling during discovery are deterministic. It is meant for
1219 If andom sampling during discovery are deterministic. It is meant for
1219 integration tests.
1220 integration tests.
1220
1221
1221 * devel.discovery.sample-size=200
1222 * devel.discovery.sample-size=200
1222
1223
1223 Control the initial size of the discovery sample
1224 Control the initial size of the discovery sample
1224
1225
1225 * devel.discovery.sample-size.initial=100
1226 * devel.discovery.sample-size.initial=100
1226
1227
1227 Control the initial size of the discovery for initial change
1228 Control the initial size of the discovery for initial change
1228 """
1229 """
1229 opts = pycompat.byteskwargs(opts)
1230 opts = pycompat.byteskwargs(opts)
1230 unfi = repo.unfiltered()
1231 unfi = repo.unfiltered()
1231
1232
1232 # setup potential extra filtering
1233 # setup potential extra filtering
1233 local_revs = opts[b"local_as_revs"]
1234 local_revs = opts[b"local_as_revs"]
1234 remote_revs = opts[b"remote_as_revs"]
1235 remote_revs = opts[b"remote_as_revs"]
1235
1236
1236 # make sure tests are repeatable
1237 # make sure tests are repeatable
1237 random.seed(int(opts[b'seed']))
1238 random.seed(int(opts[b'seed']))
1238
1239
1239 if not remote_revs:
1240 if not remote_revs:
1240
1241
1241 remoteurl, branches = urlutil.get_unique_pull_path(
1242 remoteurl, branches = urlutil.get_unique_pull_path(
1242 b'debugdiscovery', repo, ui, remoteurl
1243 b'debugdiscovery', repo, ui, remoteurl
1243 )
1244 )
1244 remote = hg.peer(repo, opts, remoteurl)
1245 remote = hg.peer(repo, opts, remoteurl)
1245 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1246 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1246 else:
1247 else:
1247 branches = (None, [])
1248 branches = (None, [])
1248 remote_filtered_revs = logcmdutil.revrange(
1249 remote_filtered_revs = logcmdutil.revrange(
1249 unfi, [b"not (::(%s))" % remote_revs]
1250 unfi, [b"not (::(%s))" % remote_revs]
1250 )
1251 )
1251 remote_filtered_revs = frozenset(remote_filtered_revs)
1252 remote_filtered_revs = frozenset(remote_filtered_revs)
1252
1253
1253 def remote_func(x):
1254 def remote_func(x):
1254 return remote_filtered_revs
1255 return remote_filtered_revs
1255
1256
1256 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1257 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1257
1258
1258 remote = repo.peer()
1259 remote = repo.peer()
1259 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1260 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1260
1261
1261 if local_revs:
1262 if local_revs:
1262 local_filtered_revs = logcmdutil.revrange(
1263 local_filtered_revs = logcmdutil.revrange(
1263 unfi, [b"not (::(%s))" % local_revs]
1264 unfi, [b"not (::(%s))" % local_revs]
1264 )
1265 )
1265 local_filtered_revs = frozenset(local_filtered_revs)
1266 local_filtered_revs = frozenset(local_filtered_revs)
1266
1267
1267 def local_func(x):
1268 def local_func(x):
1268 return local_filtered_revs
1269 return local_filtered_revs
1269
1270
1270 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1271 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1271 repo = repo.filtered(b'debug-discovery-local-filter')
1272 repo = repo.filtered(b'debug-discovery-local-filter')
1272
1273
1273 data = {}
1274 data = {}
1274 if opts.get(b'old'):
1275 if opts.get(b'old'):
1275
1276
1276 def doit(pushedrevs, remoteheads, remote=remote):
1277 def doit(pushedrevs, remoteheads, remote=remote):
1277 if not util.safehasattr(remote, b'branches'):
1278 if not util.safehasattr(remote, b'branches'):
1278 # enable in-client legacy support
1279 # enable in-client legacy support
1279 remote = localrepo.locallegacypeer(remote.local())
1280 remote = localrepo.locallegacypeer(remote.local())
1280 common, _in, hds = treediscovery.findcommonincoming(
1281 common, _in, hds = treediscovery.findcommonincoming(
1281 repo, remote, force=True, audit=data
1282 repo, remote, force=True, audit=data
1282 )
1283 )
1283 common = set(common)
1284 common = set(common)
1284 if not opts.get(b'nonheads'):
1285 if not opts.get(b'nonheads'):
1285 ui.writenoi18n(
1286 ui.writenoi18n(
1286 b"unpruned common: %s\n"
1287 b"unpruned common: %s\n"
1287 % b" ".join(sorted(short(n) for n in common))
1288 % b" ".join(sorted(short(n) for n in common))
1288 )
1289 )
1289
1290
1290 clnode = repo.changelog.node
1291 clnode = repo.changelog.node
1291 common = repo.revs(b'heads(::%ln)', common)
1292 common = repo.revs(b'heads(::%ln)', common)
1292 common = {clnode(r) for r in common}
1293 common = {clnode(r) for r in common}
1293 return common, hds
1294 return common, hds
1294
1295
1295 else:
1296 else:
1296
1297
1297 def doit(pushedrevs, remoteheads, remote=remote):
1298 def doit(pushedrevs, remoteheads, remote=remote):
1298 nodes = None
1299 nodes = None
1299 if pushedrevs:
1300 if pushedrevs:
1300 revs = logcmdutil.revrange(repo, pushedrevs)
1301 revs = logcmdutil.revrange(repo, pushedrevs)
1301 nodes = [repo[r].node() for r in revs]
1302 nodes = [repo[r].node() for r in revs]
1302 common, any, hds = setdiscovery.findcommonheads(
1303 common, any, hds = setdiscovery.findcommonheads(
1303 ui, repo, remote, ancestorsof=nodes, audit=data
1304 ui, repo, remote, ancestorsof=nodes, audit=data
1304 )
1305 )
1305 return common, hds
1306 return common, hds
1306
1307
1307 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1308 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1308 localrevs = opts[b'rev']
1309 localrevs = opts[b'rev']
1309
1310
1310 fm = ui.formatter(b'debugdiscovery', opts)
1311 fm = ui.formatter(b'debugdiscovery', opts)
1311 if fm.strict_format:
1312 if fm.strict_format:
1312
1313
1313 @contextlib.contextmanager
1314 @contextlib.contextmanager
1314 def may_capture_output():
1315 def may_capture_output():
1315 ui.pushbuffer()
1316 ui.pushbuffer()
1316 yield
1317 yield
1317 data[b'output'] = ui.popbuffer()
1318 data[b'output'] = ui.popbuffer()
1318
1319
1319 else:
1320 else:
1320 may_capture_output = util.nullcontextmanager
1321 may_capture_output = util.nullcontextmanager
1321 with may_capture_output():
1322 with may_capture_output():
1322 with util.timedcm('debug-discovery') as t:
1323 with util.timedcm('debug-discovery') as t:
1323 common, hds = doit(localrevs, remoterevs)
1324 common, hds = doit(localrevs, remoterevs)
1324
1325
1325 # compute all statistics
1326 # compute all statistics
1326 heads_common = set(common)
1327 heads_common = set(common)
1327 heads_remote = set(hds)
1328 heads_remote = set(hds)
1328 heads_local = set(repo.heads())
1329 heads_local = set(repo.heads())
1329 # note: they cannot be a local or remote head that is in common and not
1330 # note: they cannot be a local or remote head that is in common and not
1330 # itself a head of common.
1331 # itself a head of common.
1331 heads_common_local = heads_common & heads_local
1332 heads_common_local = heads_common & heads_local
1332 heads_common_remote = heads_common & heads_remote
1333 heads_common_remote = heads_common & heads_remote
1333 heads_common_both = heads_common & heads_remote & heads_local
1334 heads_common_both = heads_common & heads_remote & heads_local
1334
1335
1335 all = repo.revs(b'all()')
1336 all = repo.revs(b'all()')
1336 common = repo.revs(b'::%ln', common)
1337 common = repo.revs(b'::%ln', common)
1337 roots_common = repo.revs(b'roots(::%ld)', common)
1338 roots_common = repo.revs(b'roots(::%ld)', common)
1338 missing = repo.revs(b'not ::%ld', common)
1339 missing = repo.revs(b'not ::%ld', common)
1339 heads_missing = repo.revs(b'heads(%ld)', missing)
1340 heads_missing = repo.revs(b'heads(%ld)', missing)
1340 roots_missing = repo.revs(b'roots(%ld)', missing)
1341 roots_missing = repo.revs(b'roots(%ld)', missing)
1341 assert len(common) + len(missing) == len(all)
1342 assert len(common) + len(missing) == len(all)
1342
1343
1343 initial_undecided = repo.revs(
1344 initial_undecided = repo.revs(
1344 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1345 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1345 )
1346 )
1346 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1347 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1347 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1348 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1348 common_initial_undecided = initial_undecided & common
1349 common_initial_undecided = initial_undecided & common
1349 missing_initial_undecided = initial_undecided & missing
1350 missing_initial_undecided = initial_undecided & missing
1350
1351
1351 data[b'elapsed'] = t.elapsed
1352 data[b'elapsed'] = t.elapsed
1352 data[b'nb-common-heads'] = len(heads_common)
1353 data[b'nb-common-heads'] = len(heads_common)
1353 data[b'nb-common-heads-local'] = len(heads_common_local)
1354 data[b'nb-common-heads-local'] = len(heads_common_local)
1354 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1355 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1355 data[b'nb-common-heads-both'] = len(heads_common_both)
1356 data[b'nb-common-heads-both'] = len(heads_common_both)
1356 data[b'nb-common-roots'] = len(roots_common)
1357 data[b'nb-common-roots'] = len(roots_common)
1357 data[b'nb-head-local'] = len(heads_local)
1358 data[b'nb-head-local'] = len(heads_local)
1358 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1359 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1359 data[b'nb-head-remote'] = len(heads_remote)
1360 data[b'nb-head-remote'] = len(heads_remote)
1360 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1361 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1361 heads_common_remote
1362 heads_common_remote
1362 )
1363 )
1363 data[b'nb-revs'] = len(all)
1364 data[b'nb-revs'] = len(all)
1364 data[b'nb-revs-common'] = len(common)
1365 data[b'nb-revs-common'] = len(common)
1365 data[b'nb-revs-missing'] = len(missing)
1366 data[b'nb-revs-missing'] = len(missing)
1366 data[b'nb-missing-heads'] = len(heads_missing)
1367 data[b'nb-missing-heads'] = len(heads_missing)
1367 data[b'nb-missing-roots'] = len(roots_missing)
1368 data[b'nb-missing-roots'] = len(roots_missing)
1368 data[b'nb-ini_und'] = len(initial_undecided)
1369 data[b'nb-ini_und'] = len(initial_undecided)
1369 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1370 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1370 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1371 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1371 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1372 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1372 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1373 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1373
1374
1374 fm.startitem()
1375 fm.startitem()
1375 fm.data(**pycompat.strkwargs(data))
1376 fm.data(**pycompat.strkwargs(data))
1376 # display discovery summary
1377 # display discovery summary
1377 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1378 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1378 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1379 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1379 fm.plain(b"queries: %(total-queries)9d\n" % data)
1380 fm.plain(b"queries: %(total-queries)9d\n" % data)
1380 fm.plain(b"heads summary:\n")
1381 fm.plain(b"heads summary:\n")
1381 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1382 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1382 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1383 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1383 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1384 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1384 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1385 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1385 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1386 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1386 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1387 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1387 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1388 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1388 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1389 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1389 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1390 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1390 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1391 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1391 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1392 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1392 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1393 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1393 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1394 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1394 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1395 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1395 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1396 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1396 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1397 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1397 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1398 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1398 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1399 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1399 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1400 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1400 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1401 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1401 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1402 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1402 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1403 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1403
1404
1404 if ui.verbose:
1405 if ui.verbose:
1405 fm.plain(
1406 fm.plain(
1406 b"common heads: %s\n"
1407 b"common heads: %s\n"
1407 % b" ".join(sorted(short(n) for n in heads_common))
1408 % b" ".join(sorted(short(n) for n in heads_common))
1408 )
1409 )
1409 fm.end()
1410 fm.end()
1410
1411
1411
1412
1412 _chunksize = 4 << 10
1413 _chunksize = 4 << 10
1413
1414
1414
1415
1415 @command(
1416 @command(
1416 b'debugdownload',
1417 b'debugdownload',
1417 [
1418 [
1418 (b'o', b'output', b'', _(b'path')),
1419 (b'o', b'output', b'', _(b'path')),
1419 ],
1420 ],
1420 optionalrepo=True,
1421 optionalrepo=True,
1421 )
1422 )
1422 def debugdownload(ui, repo, url, output=None, **opts):
1423 def debugdownload(ui, repo, url, output=None, **opts):
1423 """download a resource using Mercurial logic and config"""
1424 """download a resource using Mercurial logic and config"""
1424 fh = urlmod.open(ui, url, output)
1425 fh = urlmod.open(ui, url, output)
1425
1426
1426 dest = ui
1427 dest = ui
1427 if output:
1428 if output:
1428 dest = open(output, b"wb", _chunksize)
1429 dest = open(output, b"wb", _chunksize)
1429 try:
1430 try:
1430 data = fh.read(_chunksize)
1431 data = fh.read(_chunksize)
1431 while data:
1432 while data:
1432 dest.write(data)
1433 dest.write(data)
1433 data = fh.read(_chunksize)
1434 data = fh.read(_chunksize)
1434 finally:
1435 finally:
1435 if output:
1436 if output:
1436 dest.close()
1437 dest.close()
1437
1438
1438
1439
1439 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1440 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1440 def debugextensions(ui, repo, **opts):
1441 def debugextensions(ui, repo, **opts):
1441 '''show information about active extensions'''
1442 '''show information about active extensions'''
1442 opts = pycompat.byteskwargs(opts)
1443 opts = pycompat.byteskwargs(opts)
1443 exts = extensions.extensions(ui)
1444 exts = extensions.extensions(ui)
1444 hgver = util.version()
1445 hgver = util.version()
1445 fm = ui.formatter(b'debugextensions', opts)
1446 fm = ui.formatter(b'debugextensions', opts)
1446 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1447 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1447 isinternal = extensions.ismoduleinternal(extmod)
1448 isinternal = extensions.ismoduleinternal(extmod)
1448 extsource = None
1449 extsource = None
1449
1450
1450 if util.safehasattr(extmod, '__file__'):
1451 if util.safehasattr(extmod, '__file__'):
1451 extsource = pycompat.fsencode(extmod.__file__)
1452 extsource = pycompat.fsencode(extmod.__file__)
1452 elif getattr(sys, 'oxidized', False):
1453 elif getattr(sys, 'oxidized', False):
1453 extsource = pycompat.sysexecutable
1454 extsource = pycompat.sysexecutable
1454 if isinternal:
1455 if isinternal:
1455 exttestedwith = [] # never expose magic string to users
1456 exttestedwith = [] # never expose magic string to users
1456 else:
1457 else:
1457 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1458 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1458 extbuglink = getattr(extmod, 'buglink', None)
1459 extbuglink = getattr(extmod, 'buglink', None)
1459
1460
1460 fm.startitem()
1461 fm.startitem()
1461
1462
1462 if ui.quiet or ui.verbose:
1463 if ui.quiet or ui.verbose:
1463 fm.write(b'name', b'%s\n', extname)
1464 fm.write(b'name', b'%s\n', extname)
1464 else:
1465 else:
1465 fm.write(b'name', b'%s', extname)
1466 fm.write(b'name', b'%s', extname)
1466 if isinternal or hgver in exttestedwith:
1467 if isinternal or hgver in exttestedwith:
1467 fm.plain(b'\n')
1468 fm.plain(b'\n')
1468 elif not exttestedwith:
1469 elif not exttestedwith:
1469 fm.plain(_(b' (untested!)\n'))
1470 fm.plain(_(b' (untested!)\n'))
1470 else:
1471 else:
1471 lasttestedversion = exttestedwith[-1]
1472 lasttestedversion = exttestedwith[-1]
1472 fm.plain(b' (%s!)\n' % lasttestedversion)
1473 fm.plain(b' (%s!)\n' % lasttestedversion)
1473
1474
1474 fm.condwrite(
1475 fm.condwrite(
1475 ui.verbose and extsource,
1476 ui.verbose and extsource,
1476 b'source',
1477 b'source',
1477 _(b' location: %s\n'),
1478 _(b' location: %s\n'),
1478 extsource or b"",
1479 extsource or b"",
1479 )
1480 )
1480
1481
1481 if ui.verbose:
1482 if ui.verbose:
1482 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1483 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1483 fm.data(bundled=isinternal)
1484 fm.data(bundled=isinternal)
1484
1485
1485 fm.condwrite(
1486 fm.condwrite(
1486 ui.verbose and exttestedwith,
1487 ui.verbose and exttestedwith,
1487 b'testedwith',
1488 b'testedwith',
1488 _(b' tested with: %s\n'),
1489 _(b' tested with: %s\n'),
1489 fm.formatlist(exttestedwith, name=b'ver'),
1490 fm.formatlist(exttestedwith, name=b'ver'),
1490 )
1491 )
1491
1492
1492 fm.condwrite(
1493 fm.condwrite(
1493 ui.verbose and extbuglink,
1494 ui.verbose and extbuglink,
1494 b'buglink',
1495 b'buglink',
1495 _(b' bug reporting: %s\n'),
1496 _(b' bug reporting: %s\n'),
1496 extbuglink or b"",
1497 extbuglink or b"",
1497 )
1498 )
1498
1499
1499 fm.end()
1500 fm.end()
1500
1501
1501
1502
1502 @command(
1503 @command(
1503 b'debugfileset',
1504 b'debugfileset',
1504 [
1505 [
1505 (
1506 (
1506 b'r',
1507 b'r',
1507 b'rev',
1508 b'rev',
1508 b'',
1509 b'',
1509 _(b'apply the filespec on this revision'),
1510 _(b'apply the filespec on this revision'),
1510 _(b'REV'),
1511 _(b'REV'),
1511 ),
1512 ),
1512 (
1513 (
1513 b'',
1514 b'',
1514 b'all-files',
1515 b'all-files',
1515 False,
1516 False,
1516 _(b'test files from all revisions and working directory'),
1517 _(b'test files from all revisions and working directory'),
1517 ),
1518 ),
1518 (
1519 (
1519 b's',
1520 b's',
1520 b'show-matcher',
1521 b'show-matcher',
1521 None,
1522 None,
1522 _(b'print internal representation of matcher'),
1523 _(b'print internal representation of matcher'),
1523 ),
1524 ),
1524 (
1525 (
1525 b'p',
1526 b'p',
1526 b'show-stage',
1527 b'show-stage',
1527 [],
1528 [],
1528 _(b'print parsed tree at the given stage'),
1529 _(b'print parsed tree at the given stage'),
1529 _(b'NAME'),
1530 _(b'NAME'),
1530 ),
1531 ),
1531 ],
1532 ],
1532 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1533 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1533 )
1534 )
1534 def debugfileset(ui, repo, expr, **opts):
1535 def debugfileset(ui, repo, expr, **opts):
1535 '''parse and apply a fileset specification'''
1536 '''parse and apply a fileset specification'''
1536 from . import fileset
1537 from . import fileset
1537
1538
1538 fileset.symbols # force import of fileset so we have predicates to optimize
1539 fileset.symbols # force import of fileset so we have predicates to optimize
1539 opts = pycompat.byteskwargs(opts)
1540 opts = pycompat.byteskwargs(opts)
1540 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1541 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1541
1542
1542 stages = [
1543 stages = [
1543 (b'parsed', pycompat.identity),
1544 (b'parsed', pycompat.identity),
1544 (b'analyzed', filesetlang.analyze),
1545 (b'analyzed', filesetlang.analyze),
1545 (b'optimized', filesetlang.optimize),
1546 (b'optimized', filesetlang.optimize),
1546 ]
1547 ]
1547 stagenames = {n for n, f in stages}
1548 stagenames = {n for n, f in stages}
1548
1549
1549 showalways = set()
1550 showalways = set()
1550 if ui.verbose and not opts[b'show_stage']:
1551 if ui.verbose and not opts[b'show_stage']:
1551 # show parsed tree by --verbose (deprecated)
1552 # show parsed tree by --verbose (deprecated)
1552 showalways.add(b'parsed')
1553 showalways.add(b'parsed')
1553 if opts[b'show_stage'] == [b'all']:
1554 if opts[b'show_stage'] == [b'all']:
1554 showalways.update(stagenames)
1555 showalways.update(stagenames)
1555 else:
1556 else:
1556 for n in opts[b'show_stage']:
1557 for n in opts[b'show_stage']:
1557 if n not in stagenames:
1558 if n not in stagenames:
1558 raise error.Abort(_(b'invalid stage name: %s') % n)
1559 raise error.Abort(_(b'invalid stage name: %s') % n)
1559 showalways.update(opts[b'show_stage'])
1560 showalways.update(opts[b'show_stage'])
1560
1561
1561 tree = filesetlang.parse(expr)
1562 tree = filesetlang.parse(expr)
1562 for n, f in stages:
1563 for n, f in stages:
1563 tree = f(tree)
1564 tree = f(tree)
1564 if n in showalways:
1565 if n in showalways:
1565 if opts[b'show_stage'] or n != b'parsed':
1566 if opts[b'show_stage'] or n != b'parsed':
1566 ui.write(b"* %s:\n" % n)
1567 ui.write(b"* %s:\n" % n)
1567 ui.write(filesetlang.prettyformat(tree), b"\n")
1568 ui.write(filesetlang.prettyformat(tree), b"\n")
1568
1569
1569 files = set()
1570 files = set()
1570 if opts[b'all_files']:
1571 if opts[b'all_files']:
1571 for r in repo:
1572 for r in repo:
1572 c = repo[r]
1573 c = repo[r]
1573 files.update(c.files())
1574 files.update(c.files())
1574 files.update(c.substate)
1575 files.update(c.substate)
1575 if opts[b'all_files'] or ctx.rev() is None:
1576 if opts[b'all_files'] or ctx.rev() is None:
1576 wctx = repo[None]
1577 wctx = repo[None]
1577 files.update(
1578 files.update(
1578 repo.dirstate.walk(
1579 repo.dirstate.walk(
1579 scmutil.matchall(repo),
1580 scmutil.matchall(repo),
1580 subrepos=list(wctx.substate),
1581 subrepos=list(wctx.substate),
1581 unknown=True,
1582 unknown=True,
1582 ignored=True,
1583 ignored=True,
1583 )
1584 )
1584 )
1585 )
1585 files.update(wctx.substate)
1586 files.update(wctx.substate)
1586 else:
1587 else:
1587 files.update(ctx.files())
1588 files.update(ctx.files())
1588 files.update(ctx.substate)
1589 files.update(ctx.substate)
1589
1590
1590 m = ctx.matchfileset(repo.getcwd(), expr)
1591 m = ctx.matchfileset(repo.getcwd(), expr)
1591 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1592 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1592 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1593 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1593 for f in sorted(files):
1594 for f in sorted(files):
1594 if not m(f):
1595 if not m(f):
1595 continue
1596 continue
1596 ui.write(b"%s\n" % f)
1597 ui.write(b"%s\n" % f)
1597
1598
1598
1599
1599 @command(
1600 @command(
1600 b"debug-repair-issue6528",
1601 b"debug-repair-issue6528",
1601 [
1602 [
1602 (
1603 (
1603 b'',
1604 b'',
1604 b'to-report',
1605 b'to-report',
1605 b'',
1606 b'',
1606 _(b'build a report of affected revisions to this file'),
1607 _(b'build a report of affected revisions to this file'),
1607 _(b'FILE'),
1608 _(b'FILE'),
1608 ),
1609 ),
1609 (
1610 (
1610 b'',
1611 b'',
1611 b'from-report',
1612 b'from-report',
1612 b'',
1613 b'',
1613 _(b'repair revisions listed in this report file'),
1614 _(b'repair revisions listed in this report file'),
1614 _(b'FILE'),
1615 _(b'FILE'),
1615 ),
1616 ),
1616 (
1617 (
1617 b'',
1618 b'',
1618 b'paranoid',
1619 b'paranoid',
1619 False,
1620 False,
1620 _(b'check that both detection methods do the same thing'),
1621 _(b'check that both detection methods do the same thing'),
1621 ),
1622 ),
1622 ]
1623 ]
1623 + cmdutil.dryrunopts,
1624 + cmdutil.dryrunopts,
1624 )
1625 )
1625 def debug_repair_issue6528(ui, repo, **opts):
1626 def debug_repair_issue6528(ui, repo, **opts):
1626 """find affected revisions and repair them. See issue6528 for more details.
1627 """find affected revisions and repair them. See issue6528 for more details.
1627
1628
1628 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1629 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1629 computation of affected revisions for a given repository across clones.
1630 computation of affected revisions for a given repository across clones.
1630 The report format is line-based (with empty lines ignored):
1631 The report format is line-based (with empty lines ignored):
1631
1632
1632 ```
1633 ```
1633 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1634 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1634 ```
1635 ```
1635
1636
1636 There can be multiple broken revisions per filelog, they are separated by
1637 There can be multiple broken revisions per filelog, they are separated by
1637 a comma with no spaces. The only space is between the revision(s) and the
1638 a comma with no spaces. The only space is between the revision(s) and the
1638 filename.
1639 filename.
1639
1640
1640 Note that this does *not* mean that this repairs future affected revisions,
1641 Note that this does *not* mean that this repairs future affected revisions,
1641 that needs a separate fix at the exchange level that was introduced in
1642 that needs a separate fix at the exchange level that was introduced in
1642 Mercurial 5.9.1.
1643 Mercurial 5.9.1.
1643
1644
1644 There is a `--paranoid` flag to test that the fast implementation is correct
1645 There is a `--paranoid` flag to test that the fast implementation is correct
1645 by checking it against the slow implementation. Since this matter is quite
1646 by checking it against the slow implementation. Since this matter is quite
1646 urgent and testing every edge-case is probably quite costly, we use this
1647 urgent and testing every edge-case is probably quite costly, we use this
1647 method to test on large repositories as a fuzzing method of sorts.
1648 method to test on large repositories as a fuzzing method of sorts.
1648 """
1649 """
1649 cmdutil.check_incompatible_arguments(
1650 cmdutil.check_incompatible_arguments(
1650 opts, 'to_report', ['from_report', 'dry_run']
1651 opts, 'to_report', ['from_report', 'dry_run']
1651 )
1652 )
1652 dry_run = opts.get('dry_run')
1653 dry_run = opts.get('dry_run')
1653 to_report = opts.get('to_report')
1654 to_report = opts.get('to_report')
1654 from_report = opts.get('from_report')
1655 from_report = opts.get('from_report')
1655 paranoid = opts.get('paranoid')
1656 paranoid = opts.get('paranoid')
1656 # TODO maybe add filelog pattern and revision pattern parameters to help
1657 # TODO maybe add filelog pattern and revision pattern parameters to help
1657 # narrow down the search for users that know what they're looking for?
1658 # narrow down the search for users that know what they're looking for?
1658
1659
1659 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1660 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1660 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1661 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1661 raise error.Abort(_(msg))
1662 raise error.Abort(_(msg))
1662
1663
1663 rewrite.repair_issue6528(
1664 rewrite.repair_issue6528(
1664 ui,
1665 ui,
1665 repo,
1666 repo,
1666 dry_run=dry_run,
1667 dry_run=dry_run,
1667 to_report=to_report,
1668 to_report=to_report,
1668 from_report=from_report,
1669 from_report=from_report,
1669 paranoid=paranoid,
1670 paranoid=paranoid,
1670 )
1671 )
1671
1672
1672
1673
1673 @command(b'debugformat', [] + cmdutil.formatteropts)
1674 @command(b'debugformat', [] + cmdutil.formatteropts)
1674 def debugformat(ui, repo, **opts):
1675 def debugformat(ui, repo, **opts):
1675 """display format information about the current repository
1676 """display format information about the current repository
1676
1677
1677 Use --verbose to get extra information about current config value and
1678 Use --verbose to get extra information about current config value and
1678 Mercurial default."""
1679 Mercurial default."""
1679 opts = pycompat.byteskwargs(opts)
1680 opts = pycompat.byteskwargs(opts)
1680 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1681 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1681 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1682 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1682
1683
1683 def makeformatname(name):
1684 def makeformatname(name):
1684 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1685 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1685
1686
1686 fm = ui.formatter(b'debugformat', opts)
1687 fm = ui.formatter(b'debugformat', opts)
1687 if fm.isplain():
1688 if fm.isplain():
1688
1689
1689 def formatvalue(value):
1690 def formatvalue(value):
1690 if util.safehasattr(value, b'startswith'):
1691 if util.safehasattr(value, b'startswith'):
1691 return value
1692 return value
1692 if value:
1693 if value:
1693 return b'yes'
1694 return b'yes'
1694 else:
1695 else:
1695 return b'no'
1696 return b'no'
1696
1697
1697 else:
1698 else:
1698 formatvalue = pycompat.identity
1699 formatvalue = pycompat.identity
1699
1700
1700 fm.plain(b'format-variant')
1701 fm.plain(b'format-variant')
1701 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1702 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1702 fm.plain(b' repo')
1703 fm.plain(b' repo')
1703 if ui.verbose:
1704 if ui.verbose:
1704 fm.plain(b' config default')
1705 fm.plain(b' config default')
1705 fm.plain(b'\n')
1706 fm.plain(b'\n')
1706 for fv in upgrade.allformatvariant:
1707 for fv in upgrade.allformatvariant:
1707 fm.startitem()
1708 fm.startitem()
1708 repovalue = fv.fromrepo(repo)
1709 repovalue = fv.fromrepo(repo)
1709 configvalue = fv.fromconfig(repo)
1710 configvalue = fv.fromconfig(repo)
1710
1711
1711 if repovalue != configvalue:
1712 if repovalue != configvalue:
1712 namelabel = b'formatvariant.name.mismatchconfig'
1713 namelabel = b'formatvariant.name.mismatchconfig'
1713 repolabel = b'formatvariant.repo.mismatchconfig'
1714 repolabel = b'formatvariant.repo.mismatchconfig'
1714 elif repovalue != fv.default:
1715 elif repovalue != fv.default:
1715 namelabel = b'formatvariant.name.mismatchdefault'
1716 namelabel = b'formatvariant.name.mismatchdefault'
1716 repolabel = b'formatvariant.repo.mismatchdefault'
1717 repolabel = b'formatvariant.repo.mismatchdefault'
1717 else:
1718 else:
1718 namelabel = b'formatvariant.name.uptodate'
1719 namelabel = b'formatvariant.name.uptodate'
1719 repolabel = b'formatvariant.repo.uptodate'
1720 repolabel = b'formatvariant.repo.uptodate'
1720
1721
1721 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1722 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1722 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1723 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1723 if fv.default != configvalue:
1724 if fv.default != configvalue:
1724 configlabel = b'formatvariant.config.special'
1725 configlabel = b'formatvariant.config.special'
1725 else:
1726 else:
1726 configlabel = b'formatvariant.config.default'
1727 configlabel = b'formatvariant.config.default'
1727 fm.condwrite(
1728 fm.condwrite(
1728 ui.verbose,
1729 ui.verbose,
1729 b'config',
1730 b'config',
1730 b' %6s',
1731 b' %6s',
1731 formatvalue(configvalue),
1732 formatvalue(configvalue),
1732 label=configlabel,
1733 label=configlabel,
1733 )
1734 )
1734 fm.condwrite(
1735 fm.condwrite(
1735 ui.verbose,
1736 ui.verbose,
1736 b'default',
1737 b'default',
1737 b' %7s',
1738 b' %7s',
1738 formatvalue(fv.default),
1739 formatvalue(fv.default),
1739 label=b'formatvariant.default',
1740 label=b'formatvariant.default',
1740 )
1741 )
1741 fm.plain(b'\n')
1742 fm.plain(b'\n')
1742 fm.end()
1743 fm.end()
1743
1744
1744
1745
1745 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1746 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1746 def debugfsinfo(ui, path=b"."):
1747 def debugfsinfo(ui, path=b"."):
1747 """show information detected about current filesystem"""
1748 """show information detected about current filesystem"""
1748 ui.writenoi18n(b'path: %s\n' % path)
1749 ui.writenoi18n(b'path: %s\n' % path)
1749 ui.writenoi18n(
1750 ui.writenoi18n(
1750 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1751 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1751 )
1752 )
1752 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1753 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1753 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1754 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1754 ui.writenoi18n(
1755 ui.writenoi18n(
1755 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1756 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1756 )
1757 )
1757 ui.writenoi18n(
1758 ui.writenoi18n(
1758 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1759 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1759 )
1760 )
1760 casesensitive = b'(unknown)'
1761 casesensitive = b'(unknown)'
1761 try:
1762 try:
1762 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1763 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1763 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1764 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1764 except OSError:
1765 except OSError:
1765 pass
1766 pass
1766 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1767 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1767
1768
1768
1769
1769 @command(
1770 @command(
1770 b'debuggetbundle',
1771 b'debuggetbundle',
1771 [
1772 [
1772 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1773 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1773 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1774 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1774 (
1775 (
1775 b't',
1776 b't',
1776 b'type',
1777 b'type',
1777 b'bzip2',
1778 b'bzip2',
1778 _(b'bundle compression type to use'),
1779 _(b'bundle compression type to use'),
1779 _(b'TYPE'),
1780 _(b'TYPE'),
1780 ),
1781 ),
1781 ],
1782 ],
1782 _(b'REPO FILE [-H|-C ID]...'),
1783 _(b'REPO FILE [-H|-C ID]...'),
1783 norepo=True,
1784 norepo=True,
1784 )
1785 )
1785 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1786 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1786 """retrieves a bundle from a repo
1787 """retrieves a bundle from a repo
1787
1788
1788 Every ID must be a full-length hex node id string. Saves the bundle to the
1789 Every ID must be a full-length hex node id string. Saves the bundle to the
1789 given file.
1790 given file.
1790 """
1791 """
1791 opts = pycompat.byteskwargs(opts)
1792 opts = pycompat.byteskwargs(opts)
1792 repo = hg.peer(ui, opts, repopath)
1793 repo = hg.peer(ui, opts, repopath)
1793 if not repo.capable(b'getbundle'):
1794 if not repo.capable(b'getbundle'):
1794 raise error.Abort(b"getbundle() not supported by target repository")
1795 raise error.Abort(b"getbundle() not supported by target repository")
1795 args = {}
1796 args = {}
1796 if common:
1797 if common:
1797 args['common'] = [bin(s) for s in common]
1798 args['common'] = [bin(s) for s in common]
1798 if head:
1799 if head:
1799 args['heads'] = [bin(s) for s in head]
1800 args['heads'] = [bin(s) for s in head]
1800 # TODO: get desired bundlecaps from command line.
1801 # TODO: get desired bundlecaps from command line.
1801 args['bundlecaps'] = None
1802 args['bundlecaps'] = None
1802 bundle = repo.getbundle(b'debug', **args)
1803 bundle = repo.getbundle(b'debug', **args)
1803
1804
1804 bundletype = opts.get(b'type', b'bzip2').lower()
1805 bundletype = opts.get(b'type', b'bzip2').lower()
1805 btypes = {
1806 btypes = {
1806 b'none': b'HG10UN',
1807 b'none': b'HG10UN',
1807 b'bzip2': b'HG10BZ',
1808 b'bzip2': b'HG10BZ',
1808 b'gzip': b'HG10GZ',
1809 b'gzip': b'HG10GZ',
1809 b'bundle2': b'HG20',
1810 b'bundle2': b'HG20',
1810 }
1811 }
1811 bundletype = btypes.get(bundletype)
1812 bundletype = btypes.get(bundletype)
1812 if bundletype not in bundle2.bundletypes:
1813 if bundletype not in bundle2.bundletypes:
1813 raise error.Abort(_(b'unknown bundle type specified with --type'))
1814 raise error.Abort(_(b'unknown bundle type specified with --type'))
1814 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1815 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1815
1816
1816
1817
1817 @command(b'debugignore', [], b'[FILE]')
1818 @command(b'debugignore', [], b'[FILE]')
1818 def debugignore(ui, repo, *files, **opts):
1819 def debugignore(ui, repo, *files, **opts):
1819 """display the combined ignore pattern and information about ignored files
1820 """display the combined ignore pattern and information about ignored files
1820
1821
1821 With no argument display the combined ignore pattern.
1822 With no argument display the combined ignore pattern.
1822
1823
1823 Given space separated file names, shows if the given file is ignored and
1824 Given space separated file names, shows if the given file is ignored and
1824 if so, show the ignore rule (file and line number) that matched it.
1825 if so, show the ignore rule (file and line number) that matched it.
1825 """
1826 """
1826 ignore = repo.dirstate._ignore
1827 ignore = repo.dirstate._ignore
1827 if not files:
1828 if not files:
1828 # Show all the patterns
1829 # Show all the patterns
1829 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1830 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1830 else:
1831 else:
1831 m = scmutil.match(repo[None], pats=files)
1832 m = scmutil.match(repo[None], pats=files)
1832 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1833 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1833 for f in m.files():
1834 for f in m.files():
1834 nf = util.normpath(f)
1835 nf = util.normpath(f)
1835 ignored = None
1836 ignored = None
1836 ignoredata = None
1837 ignoredata = None
1837 if nf != b'.':
1838 if nf != b'.':
1838 if ignore(nf):
1839 if ignore(nf):
1839 ignored = nf
1840 ignored = nf
1840 ignoredata = repo.dirstate._ignorefileandline(nf)
1841 ignoredata = repo.dirstate._ignorefileandline(nf)
1841 else:
1842 else:
1842 for p in pathutil.finddirs(nf):
1843 for p in pathutil.finddirs(nf):
1843 if ignore(p):
1844 if ignore(p):
1844 ignored = p
1845 ignored = p
1845 ignoredata = repo.dirstate._ignorefileandline(p)
1846 ignoredata = repo.dirstate._ignorefileandline(p)
1846 break
1847 break
1847 if ignored:
1848 if ignored:
1848 if ignored == nf:
1849 if ignored == nf:
1849 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1850 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1850 else:
1851 else:
1851 ui.write(
1852 ui.write(
1852 _(
1853 _(
1853 b"%s is ignored because of "
1854 b"%s is ignored because of "
1854 b"containing directory %s\n"
1855 b"containing directory %s\n"
1855 )
1856 )
1856 % (uipathfn(f), ignored)
1857 % (uipathfn(f), ignored)
1857 )
1858 )
1858 ignorefile, lineno, line = ignoredata
1859 ignorefile, lineno, line = ignoredata
1859 ui.write(
1860 ui.write(
1860 _(b"(ignore rule in %s, line %d: '%s')\n")
1861 _(b"(ignore rule in %s, line %d: '%s')\n")
1861 % (ignorefile, lineno, line)
1862 % (ignorefile, lineno, line)
1862 )
1863 )
1863 else:
1864 else:
1864 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1865 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1865
1866
1866
1867
1867 @command(
1868 @command(
1868 b'debug-revlog-index|debugindex',
1869 b'debug-revlog-index|debugindex',
1869 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1870 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1870 _(b'-c|-m|FILE'),
1871 _(b'-c|-m|FILE'),
1871 )
1872 )
1872 def debugindex(ui, repo, file_=None, **opts):
1873 def debugindex(ui, repo, file_=None, **opts):
1873 """dump index data for a revlog"""
1874 """dump index data for a revlog"""
1874 opts = pycompat.byteskwargs(opts)
1875 opts = pycompat.byteskwargs(opts)
1875 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1876 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1876
1877
1877 if ui.debugflag:
1878 shortfn = hex
1879 else:
1880 shortfn = short
1881
1882 idlen = 12
1883 for i in store:
1884 idlen = len(shortfn(store.node(i)))
1885 break
1886
1887 fm = ui.formatter(b'debugindex', opts)
1878 fm = ui.formatter(b'debugindex', opts)
1888 fm.plain(
1879
1889 b' rev linkrev %s %s p2\n'
1880 return revlog_debug.debug_index(
1890 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1881 ui,
1882 repo,
1883 formatter=fm,
1884 revlog=store,
1885 full_node=ui.debugflag,
1891 )
1886 )
1892
1887
1893 for rev in store:
1894 node = store.node(rev)
1895 parents = store.parents(node)
1896
1897 fm.startitem()
1898 fm.write(b'rev', b'%6d ', rev)
1899 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1900 fm.write(b'node', b'%s ', shortfn(node))
1901 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1902 fm.write(b'p2', b'%s', shortfn(parents[1]))
1903 fm.plain(b'\n')
1904
1905 fm.end()
1906
1907
1888
1908 @command(
1889 @command(
1909 b'debugindexdot',
1890 b'debugindexdot',
1910 cmdutil.debugrevlogopts,
1891 cmdutil.debugrevlogopts,
1911 _(b'-c|-m|FILE'),
1892 _(b'-c|-m|FILE'),
1912 optionalrepo=True,
1893 optionalrepo=True,
1913 )
1894 )
1914 def debugindexdot(ui, repo, file_=None, **opts):
1895 def debugindexdot(ui, repo, file_=None, **opts):
1915 """dump an index DAG as a graphviz dot file"""
1896 """dump an index DAG as a graphviz dot file"""
1916 opts = pycompat.byteskwargs(opts)
1897 opts = pycompat.byteskwargs(opts)
1917 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1898 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1918 ui.writenoi18n(b"digraph G {\n")
1899 ui.writenoi18n(b"digraph G {\n")
1919 for i in r:
1900 for i in r:
1920 node = r.node(i)
1901 node = r.node(i)
1921 pp = r.parents(node)
1902 pp = r.parents(node)
1922 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1903 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1923 if pp[1] != repo.nullid:
1904 if pp[1] != repo.nullid:
1924 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1905 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1925 ui.write(b"}\n")
1906 ui.write(b"}\n")
1926
1907
1927
1908
1928 @command(b'debugindexstats', [])
1909 @command(b'debugindexstats', [])
1929 def debugindexstats(ui, repo):
1910 def debugindexstats(ui, repo):
1930 """show stats related to the changelog index"""
1911 """show stats related to the changelog index"""
1931 repo.changelog.shortest(repo.nullid, 1)
1912 repo.changelog.shortest(repo.nullid, 1)
1932 index = repo.changelog.index
1913 index = repo.changelog.index
1933 if not util.safehasattr(index, b'stats'):
1914 if not util.safehasattr(index, b'stats'):
1934 raise error.Abort(_(b'debugindexstats only works with native code'))
1915 raise error.Abort(_(b'debugindexstats only works with native code'))
1935 for k, v in sorted(index.stats().items()):
1916 for k, v in sorted(index.stats().items()):
1936 ui.write(b'%s: %d\n' % (k, v))
1917 ui.write(b'%s: %d\n' % (k, v))
1937
1918
1938
1919
1939 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1920 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1940 def debuginstall(ui, **opts):
1921 def debuginstall(ui, **opts):
1941 """test Mercurial installation
1922 """test Mercurial installation
1942
1923
1943 Returns 0 on success.
1924 Returns 0 on success.
1944 """
1925 """
1945 opts = pycompat.byteskwargs(opts)
1926 opts = pycompat.byteskwargs(opts)
1946
1927
1947 problems = 0
1928 problems = 0
1948
1929
1949 fm = ui.formatter(b'debuginstall', opts)
1930 fm = ui.formatter(b'debuginstall', opts)
1950 fm.startitem()
1931 fm.startitem()
1951
1932
1952 # encoding might be unknown or wrong. don't translate these messages.
1933 # encoding might be unknown or wrong. don't translate these messages.
1953 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1934 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1954 err = None
1935 err = None
1955 try:
1936 try:
1956 codecs.lookup(pycompat.sysstr(encoding.encoding))
1937 codecs.lookup(pycompat.sysstr(encoding.encoding))
1957 except LookupError as inst:
1938 except LookupError as inst:
1958 err = stringutil.forcebytestr(inst)
1939 err = stringutil.forcebytestr(inst)
1959 problems += 1
1940 problems += 1
1960 fm.condwrite(
1941 fm.condwrite(
1961 err,
1942 err,
1962 b'encodingerror',
1943 b'encodingerror',
1963 b" %s\n (check that your locale is properly set)\n",
1944 b" %s\n (check that your locale is properly set)\n",
1964 err,
1945 err,
1965 )
1946 )
1966
1947
1967 # Python
1948 # Python
1968 pythonlib = None
1949 pythonlib = None
1969 if util.safehasattr(os, '__file__'):
1950 if util.safehasattr(os, '__file__'):
1970 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1951 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1971 elif getattr(sys, 'oxidized', False):
1952 elif getattr(sys, 'oxidized', False):
1972 pythonlib = pycompat.sysexecutable
1953 pythonlib = pycompat.sysexecutable
1973
1954
1974 fm.write(
1955 fm.write(
1975 b'pythonexe',
1956 b'pythonexe',
1976 _(b"checking Python executable (%s)\n"),
1957 _(b"checking Python executable (%s)\n"),
1977 pycompat.sysexecutable or _(b"unknown"),
1958 pycompat.sysexecutable or _(b"unknown"),
1978 )
1959 )
1979 fm.write(
1960 fm.write(
1980 b'pythonimplementation',
1961 b'pythonimplementation',
1981 _(b"checking Python implementation (%s)\n"),
1962 _(b"checking Python implementation (%s)\n"),
1982 pycompat.sysbytes(platform.python_implementation()),
1963 pycompat.sysbytes(platform.python_implementation()),
1983 )
1964 )
1984 fm.write(
1965 fm.write(
1985 b'pythonver',
1966 b'pythonver',
1986 _(b"checking Python version (%s)\n"),
1967 _(b"checking Python version (%s)\n"),
1987 (b"%d.%d.%d" % sys.version_info[:3]),
1968 (b"%d.%d.%d" % sys.version_info[:3]),
1988 )
1969 )
1989 fm.write(
1970 fm.write(
1990 b'pythonlib',
1971 b'pythonlib',
1991 _(b"checking Python lib (%s)...\n"),
1972 _(b"checking Python lib (%s)...\n"),
1992 pythonlib or _(b"unknown"),
1973 pythonlib or _(b"unknown"),
1993 )
1974 )
1994
1975
1995 try:
1976 try:
1996 from . import rustext # pytype: disable=import-error
1977 from . import rustext # pytype: disable=import-error
1997
1978
1998 rustext.__doc__ # trigger lazy import
1979 rustext.__doc__ # trigger lazy import
1999 except ImportError:
1980 except ImportError:
2000 rustext = None
1981 rustext = None
2001
1982
2002 security = set(sslutil.supportedprotocols)
1983 security = set(sslutil.supportedprotocols)
2003 if sslutil.hassni:
1984 if sslutil.hassni:
2004 security.add(b'sni')
1985 security.add(b'sni')
2005
1986
2006 fm.write(
1987 fm.write(
2007 b'pythonsecurity',
1988 b'pythonsecurity',
2008 _(b"checking Python security support (%s)\n"),
1989 _(b"checking Python security support (%s)\n"),
2009 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1990 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2010 )
1991 )
2011
1992
2012 # These are warnings, not errors. So don't increment problem count. This
1993 # These are warnings, not errors. So don't increment problem count. This
2013 # may change in the future.
1994 # may change in the future.
2014 if b'tls1.2' not in security:
1995 if b'tls1.2' not in security:
2015 fm.plain(
1996 fm.plain(
2016 _(
1997 _(
2017 b' TLS 1.2 not supported by Python install; '
1998 b' TLS 1.2 not supported by Python install; '
2018 b'network connections lack modern security\n'
1999 b'network connections lack modern security\n'
2019 )
2000 )
2020 )
2001 )
2021 if b'sni' not in security:
2002 if b'sni' not in security:
2022 fm.plain(
2003 fm.plain(
2023 _(
2004 _(
2024 b' SNI not supported by Python install; may have '
2005 b' SNI not supported by Python install; may have '
2025 b'connectivity issues with some servers\n'
2006 b'connectivity issues with some servers\n'
2026 )
2007 )
2027 )
2008 )
2028
2009
2029 fm.plain(
2010 fm.plain(
2030 _(
2011 _(
2031 b"checking Rust extensions (%s)\n"
2012 b"checking Rust extensions (%s)\n"
2032 % (b'missing' if rustext is None else b'installed')
2013 % (b'missing' if rustext is None else b'installed')
2033 ),
2014 ),
2034 )
2015 )
2035
2016
2036 # TODO print CA cert info
2017 # TODO print CA cert info
2037
2018
2038 # hg version
2019 # hg version
2039 hgver = util.version()
2020 hgver = util.version()
2040 fm.write(
2021 fm.write(
2041 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2022 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2042 )
2023 )
2043 fm.write(
2024 fm.write(
2044 b'hgverextra',
2025 b'hgverextra',
2045 _(b"checking Mercurial custom build (%s)\n"),
2026 _(b"checking Mercurial custom build (%s)\n"),
2046 b'+'.join(hgver.split(b'+')[1:]),
2027 b'+'.join(hgver.split(b'+')[1:]),
2047 )
2028 )
2048
2029
2049 # compiled modules
2030 # compiled modules
2050 hgmodules = None
2031 hgmodules = None
2051 if util.safehasattr(sys.modules[__name__], '__file__'):
2032 if util.safehasattr(sys.modules[__name__], '__file__'):
2052 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2033 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2053 elif getattr(sys, 'oxidized', False):
2034 elif getattr(sys, 'oxidized', False):
2054 hgmodules = pycompat.sysexecutable
2035 hgmodules = pycompat.sysexecutable
2055
2036
2056 fm.write(
2037 fm.write(
2057 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2038 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2058 )
2039 )
2059 fm.write(
2040 fm.write(
2060 b'hgmodules',
2041 b'hgmodules',
2061 _(b"checking installed modules (%s)...\n"),
2042 _(b"checking installed modules (%s)...\n"),
2062 hgmodules or _(b"unknown"),
2043 hgmodules or _(b"unknown"),
2063 )
2044 )
2064
2045
2065 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2046 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2066 rustext = rustandc # for now, that's the only case
2047 rustext = rustandc # for now, that's the only case
2067 cext = policy.policy in (b'c', b'allow') or rustandc
2048 cext = policy.policy in (b'c', b'allow') or rustandc
2068 nopure = cext or rustext
2049 nopure = cext or rustext
2069 if nopure:
2050 if nopure:
2070 err = None
2051 err = None
2071 try:
2052 try:
2072 if cext:
2053 if cext:
2073 from .cext import ( # pytype: disable=import-error
2054 from .cext import ( # pytype: disable=import-error
2074 base85,
2055 base85,
2075 bdiff,
2056 bdiff,
2076 mpatch,
2057 mpatch,
2077 osutil,
2058 osutil,
2078 )
2059 )
2079
2060
2080 # quiet pyflakes
2061 # quiet pyflakes
2081 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2062 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2082 if rustext:
2063 if rustext:
2083 from .rustext import ( # pytype: disable=import-error
2064 from .rustext import ( # pytype: disable=import-error
2084 ancestor,
2065 ancestor,
2085 dirstate,
2066 dirstate,
2086 )
2067 )
2087
2068
2088 dir(ancestor), dir(dirstate) # quiet pyflakes
2069 dir(ancestor), dir(dirstate) # quiet pyflakes
2089 except Exception as inst:
2070 except Exception as inst:
2090 err = stringutil.forcebytestr(inst)
2071 err = stringutil.forcebytestr(inst)
2091 problems += 1
2072 problems += 1
2092 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2073 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2093
2074
2094 compengines = util.compengines._engines.values()
2075 compengines = util.compengines._engines.values()
2095 fm.write(
2076 fm.write(
2096 b'compengines',
2077 b'compengines',
2097 _(b'checking registered compression engines (%s)\n'),
2078 _(b'checking registered compression engines (%s)\n'),
2098 fm.formatlist(
2079 fm.formatlist(
2099 sorted(e.name() for e in compengines),
2080 sorted(e.name() for e in compengines),
2100 name=b'compengine',
2081 name=b'compengine',
2101 fmt=b'%s',
2082 fmt=b'%s',
2102 sep=b', ',
2083 sep=b', ',
2103 ),
2084 ),
2104 )
2085 )
2105 fm.write(
2086 fm.write(
2106 b'compenginesavail',
2087 b'compenginesavail',
2107 _(b'checking available compression engines (%s)\n'),
2088 _(b'checking available compression engines (%s)\n'),
2108 fm.formatlist(
2089 fm.formatlist(
2109 sorted(e.name() for e in compengines if e.available()),
2090 sorted(e.name() for e in compengines if e.available()),
2110 name=b'compengine',
2091 name=b'compengine',
2111 fmt=b'%s',
2092 fmt=b'%s',
2112 sep=b', ',
2093 sep=b', ',
2113 ),
2094 ),
2114 )
2095 )
2115 wirecompengines = compression.compengines.supportedwireengines(
2096 wirecompengines = compression.compengines.supportedwireengines(
2116 compression.SERVERROLE
2097 compression.SERVERROLE
2117 )
2098 )
2118 fm.write(
2099 fm.write(
2119 b'compenginesserver',
2100 b'compenginesserver',
2120 _(
2101 _(
2121 b'checking available compression engines '
2102 b'checking available compression engines '
2122 b'for wire protocol (%s)\n'
2103 b'for wire protocol (%s)\n'
2123 ),
2104 ),
2124 fm.formatlist(
2105 fm.formatlist(
2125 [e.name() for e in wirecompengines if e.wireprotosupport()],
2106 [e.name() for e in wirecompengines if e.wireprotosupport()],
2126 name=b'compengine',
2107 name=b'compengine',
2127 fmt=b'%s',
2108 fmt=b'%s',
2128 sep=b', ',
2109 sep=b', ',
2129 ),
2110 ),
2130 )
2111 )
2131 re2 = b'missing'
2112 re2 = b'missing'
2132 if util._re2:
2113 if util._re2:
2133 re2 = b'available'
2114 re2 = b'available'
2134 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2115 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2135 fm.data(re2=bool(util._re2))
2116 fm.data(re2=bool(util._re2))
2136
2117
2137 # templates
2118 # templates
2138 p = templater.templatedir()
2119 p = templater.templatedir()
2139 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2120 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2140 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2121 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2141 if p:
2122 if p:
2142 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2123 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2143 if m:
2124 if m:
2144 # template found, check if it is working
2125 # template found, check if it is working
2145 err = None
2126 err = None
2146 try:
2127 try:
2147 templater.templater.frommapfile(m)
2128 templater.templater.frommapfile(m)
2148 except Exception as inst:
2129 except Exception as inst:
2149 err = stringutil.forcebytestr(inst)
2130 err = stringutil.forcebytestr(inst)
2150 p = None
2131 p = None
2151 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2132 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2152 else:
2133 else:
2153 p = None
2134 p = None
2154 fm.condwrite(
2135 fm.condwrite(
2155 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2136 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2156 )
2137 )
2157 fm.condwrite(
2138 fm.condwrite(
2158 not m,
2139 not m,
2159 b'defaulttemplatenotfound',
2140 b'defaulttemplatenotfound',
2160 _(b" template '%s' not found\n"),
2141 _(b" template '%s' not found\n"),
2161 b"default",
2142 b"default",
2162 )
2143 )
2163 if not p:
2144 if not p:
2164 problems += 1
2145 problems += 1
2165 fm.condwrite(
2146 fm.condwrite(
2166 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2147 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2167 )
2148 )
2168
2149
2169 # editor
2150 # editor
2170 editor = ui.geteditor()
2151 editor = ui.geteditor()
2171 editor = util.expandpath(editor)
2152 editor = util.expandpath(editor)
2172 editorbin = procutil.shellsplit(editor)[0]
2153 editorbin = procutil.shellsplit(editor)[0]
2173 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2154 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2174 cmdpath = procutil.findexe(editorbin)
2155 cmdpath = procutil.findexe(editorbin)
2175 fm.condwrite(
2156 fm.condwrite(
2176 not cmdpath and editor == b'vi',
2157 not cmdpath and editor == b'vi',
2177 b'vinotfound',
2158 b'vinotfound',
2178 _(
2159 _(
2179 b" No commit editor set and can't find %s in PATH\n"
2160 b" No commit editor set and can't find %s in PATH\n"
2180 b" (specify a commit editor in your configuration"
2161 b" (specify a commit editor in your configuration"
2181 b" file)\n"
2162 b" file)\n"
2182 ),
2163 ),
2183 not cmdpath and editor == b'vi' and editorbin,
2164 not cmdpath and editor == b'vi' and editorbin,
2184 )
2165 )
2185 fm.condwrite(
2166 fm.condwrite(
2186 not cmdpath and editor != b'vi',
2167 not cmdpath and editor != b'vi',
2187 b'editornotfound',
2168 b'editornotfound',
2188 _(
2169 _(
2189 b" Can't find editor '%s' in PATH\n"
2170 b" Can't find editor '%s' in PATH\n"
2190 b" (specify a commit editor in your configuration"
2171 b" (specify a commit editor in your configuration"
2191 b" file)\n"
2172 b" file)\n"
2192 ),
2173 ),
2193 not cmdpath and editorbin,
2174 not cmdpath and editorbin,
2194 )
2175 )
2195 if not cmdpath and editor != b'vi':
2176 if not cmdpath and editor != b'vi':
2196 problems += 1
2177 problems += 1
2197
2178
2198 # check username
2179 # check username
2199 username = None
2180 username = None
2200 err = None
2181 err = None
2201 try:
2182 try:
2202 username = ui.username()
2183 username = ui.username()
2203 except error.Abort as e:
2184 except error.Abort as e:
2204 err = e.message
2185 err = e.message
2205 problems += 1
2186 problems += 1
2206
2187
2207 fm.condwrite(
2188 fm.condwrite(
2208 username, b'username', _(b"checking username (%s)\n"), username
2189 username, b'username', _(b"checking username (%s)\n"), username
2209 )
2190 )
2210 fm.condwrite(
2191 fm.condwrite(
2211 err,
2192 err,
2212 b'usernameerror',
2193 b'usernameerror',
2213 _(
2194 _(
2214 b"checking username...\n %s\n"
2195 b"checking username...\n %s\n"
2215 b" (specify a username in your configuration file)\n"
2196 b" (specify a username in your configuration file)\n"
2216 ),
2197 ),
2217 err,
2198 err,
2218 )
2199 )
2219
2200
2220 for name, mod in extensions.extensions():
2201 for name, mod in extensions.extensions():
2221 handler = getattr(mod, 'debuginstall', None)
2202 handler = getattr(mod, 'debuginstall', None)
2222 if handler is not None:
2203 if handler is not None:
2223 problems += handler(ui, fm)
2204 problems += handler(ui, fm)
2224
2205
2225 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2206 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2226 if not problems:
2207 if not problems:
2227 fm.data(problems=problems)
2208 fm.data(problems=problems)
2228 fm.condwrite(
2209 fm.condwrite(
2229 problems,
2210 problems,
2230 b'problems',
2211 b'problems',
2231 _(b"%d problems detected, please check your install!\n"),
2212 _(b"%d problems detected, please check your install!\n"),
2232 problems,
2213 problems,
2233 )
2214 )
2234 fm.end()
2215 fm.end()
2235
2216
2236 return problems
2217 return problems
2237
2218
2238
2219
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer so this also works against remote repos.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query all ids in a single wire-protocol round trip.
    known_flags = peer.known([bin(node_id) for node_id in ids])
    rendered = b"".join(b"1" if known else b"0" for known in known_flags)
    ui.write(b"%s\n" % rendered)
2252
2233
2253
2234
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Retained only as an alias; all the work happens in debugnamecomplete.
    return debugnamecomplete(ui, repo, *args)
2258
2239
2259
2240
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: just unlink the lock file(s) and exit immediately.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # --set-lock / --set-wlock: acquire non-blocking (wait=False) so we
        # abort instead of hanging if somebody else already holds the lock.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold the lock until the user answers.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: hold the lock until interrupted.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release anything we acquired, even on Abort/interrupt.
        release(*locks)

    # No modifying option given: report current lock state.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got the lock ourselves, so nobody else holds it: free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock content is "host:pid"; only show the host when the
                    # lock was taken on a different machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between probe and lstat;
                # anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2383
2364
2384
2365
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        """Return the fulltext cache of the manifest storage, or abort."""
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            # Alternate storage backends may not implement this cache at all.
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock because clearing touches on-disk persisted cache data.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revisision in cache too
            return

    # Default action: dump cache statistics and contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order while reporting
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2458
2439
2459
2440
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirrors the nested data layout produced below
        # (commits, files with extras, then file-less extras).
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the local/other heads of the in-progress merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: per-file merge records with their extras nested.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # Record layout depends on the record type: content merges carry
            # hash/path/node fields, path conflicts carry rename info.
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras" section: extras for files NOT in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2567
2548
2568
2549
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, we will handle the
    # 'branches' namespace specially below instead of via listnames()
    for namespace, ns in repo.names.items():
        if namespace == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    open_branches = {
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    }
    candidates.update(open_branches)

    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2591
2572
2592
2573
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the current changelog index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # Rust/native index can produce the binary blob itself.
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Emit the raw persisted nodemap bytes, if any exist on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the live index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) describing the persisted data.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): divides by data_length — presumably a persisted
            # nodemap is never empty, but a zero length would raise here;
            # confirm against the writer.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654
2635
2655
2636
2656 @command(
2637 @command(
2657 b'debugobsolete',
2638 b'debugobsolete',
2658 [
2639 [
2659 (b'', b'flags', 0, _(b'markers flag')),
2640 (b'', b'flags', 0, _(b'markers flag')),
2660 (
2641 (
2661 b'',
2642 b'',
2662 b'record-parents',
2643 b'record-parents',
2663 False,
2644 False,
2664 _(b'record parent information for the precursor'),
2645 _(b'record parent information for the precursor'),
2665 ),
2646 ),
2666 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2647 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2667 (
2648 (
2668 b'',
2649 b'',
2669 b'exclusive',
2650 b'exclusive',
2670 False,
2651 False,
2671 _(b'restrict display to markers only relevant to REV'),
2652 _(b'restrict display to markers only relevant to REV'),
2672 ),
2653 ),
2673 (b'', b'index', False, _(b'display index of the marker')),
2654 (b'', b'index', False, _(b'display index of the marker')),
2674 (b'', b'delete', [], _(b'delete markers specified by indices')),
2655 (b'', b'delete', [], _(b'delete markers specified by indices')),
2675 ]
2656 ]
2676 + cmdutil.commitopts2
2657 + cmdutil.commitopts2
2677 + cmdutil.formatteropts,
2658 + cmdutil.formatteropts,
2678 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2659 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2679 )
2660 )
2680 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2661 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2681 """create arbitrary obsolete marker
2662 """create arbitrary obsolete marker
2682
2663
2683 With no arguments, displays the list of obsolescence markers."""
2664 With no arguments, displays the list of obsolescence markers."""
2684
2665
2685 opts = pycompat.byteskwargs(opts)
2666 opts = pycompat.byteskwargs(opts)
2686
2667
2687 def parsenodeid(s):
2668 def parsenodeid(s):
2688 try:
2669 try:
2689 # We do not use revsingle/revrange functions here to accept
2670 # We do not use revsingle/revrange functions here to accept
2690 # arbitrary node identifiers, possibly not present in the
2671 # arbitrary node identifiers, possibly not present in the
2691 # local repository.
2672 # local repository.
2692 n = bin(s)
2673 n = bin(s)
2693 if len(n) != repo.nodeconstants.nodelen:
2674 if len(n) != repo.nodeconstants.nodelen:
2694 raise ValueError
2675 raise ValueError
2695 return n
2676 return n
2696 except ValueError:
2677 except ValueError:
2697 raise error.InputError(
2678 raise error.InputError(
2698 b'changeset references must be full hexadecimal '
2679 b'changeset references must be full hexadecimal '
2699 b'node identifiers'
2680 b'node identifiers'
2700 )
2681 )
2701
2682
2702 if opts.get(b'delete'):
2683 if opts.get(b'delete'):
2703 indices = []
2684 indices = []
2704 for v in opts.get(b'delete'):
2685 for v in opts.get(b'delete'):
2705 try:
2686 try:
2706 indices.append(int(v))
2687 indices.append(int(v))
2707 except ValueError:
2688 except ValueError:
2708 raise error.InputError(
2689 raise error.InputError(
2709 _(b'invalid index value: %r') % v,
2690 _(b'invalid index value: %r') % v,
2710 hint=_(b'use integers for indices'),
2691 hint=_(b'use integers for indices'),
2711 )
2692 )
2712
2693
2713 if repo.currenttransaction():
2694 if repo.currenttransaction():
2714 raise error.Abort(
2695 raise error.Abort(
2715 _(b'cannot delete obsmarkers in the middle of transaction.')
2696 _(b'cannot delete obsmarkers in the middle of transaction.')
2716 )
2697 )
2717
2698
2718 with repo.lock():
2699 with repo.lock():
2719 n = repair.deleteobsmarkers(repo.obsstore, indices)
2700 n = repair.deleteobsmarkers(repo.obsstore, indices)
2720 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2701 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2721
2702
2722 return
2703 return
2723
2704
2724 if precursor is not None:
2705 if precursor is not None:
2725 if opts[b'rev']:
2706 if opts[b'rev']:
2726 raise error.InputError(
2707 raise error.InputError(
2727 b'cannot select revision when creating marker'
2708 b'cannot select revision when creating marker'
2728 )
2709 )
2729 metadata = {}
2710 metadata = {}
2730 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2711 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2731 succs = tuple(parsenodeid(succ) for succ in successors)
2712 succs = tuple(parsenodeid(succ) for succ in successors)
2732 l = repo.lock()
2713 l = repo.lock()
2733 try:
2714 try:
2734 tr = repo.transaction(b'debugobsolete')
2715 tr = repo.transaction(b'debugobsolete')
2735 try:
2716 try:
2736 date = opts.get(b'date')
2717 date = opts.get(b'date')
2737 if date:
2718 if date:
2738 date = dateutil.parsedate(date)
2719 date = dateutil.parsedate(date)
2739 else:
2720 else:
2740 date = None
2721 date = None
2741 prec = parsenodeid(precursor)
2722 prec = parsenodeid(precursor)
2742 parents = None
2723 parents = None
2743 if opts[b'record_parents']:
2724 if opts[b'record_parents']:
2744 if prec not in repo.unfiltered():
2725 if prec not in repo.unfiltered():
2745 raise error.Abort(
2726 raise error.Abort(
2746 b'cannot used --record-parents on '
2727 b'cannot used --record-parents on '
2747 b'unknown changesets'
2728 b'unknown changesets'
2748 )
2729 )
2749 parents = repo.unfiltered()[prec].parents()
2730 parents = repo.unfiltered()[prec].parents()
2750 parents = tuple(p.node() for p in parents)
2731 parents = tuple(p.node() for p in parents)
2751 repo.obsstore.create(
2732 repo.obsstore.create(
2752 tr,
2733 tr,
2753 prec,
2734 prec,
2754 succs,
2735 succs,
2755 opts[b'flags'],
2736 opts[b'flags'],
2756 parents=parents,
2737 parents=parents,
2757 date=date,
2738 date=date,
2758 metadata=metadata,
2739 metadata=metadata,
2759 ui=ui,
2740 ui=ui,
2760 )
2741 )
2761 tr.close()
2742 tr.close()
2762 except ValueError as exc:
2743 except ValueError as exc:
2763 raise error.Abort(
2744 raise error.Abort(
2764 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2745 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2765 )
2746 )
2766 finally:
2747 finally:
2767 tr.release()
2748 tr.release()
2768 finally:
2749 finally:
2769 l.release()
2750 l.release()
2770 else:
2751 else:
2771 if opts[b'rev']:
2752 if opts[b'rev']:
2772 revs = logcmdutil.revrange(repo, opts[b'rev'])
2753 revs = logcmdutil.revrange(repo, opts[b'rev'])
2773 nodes = [repo[r].node() for r in revs]
2754 nodes = [repo[r].node() for r in revs]
2774 markers = list(
2755 markers = list(
2775 obsutil.getmarkers(
2756 obsutil.getmarkers(
2776 repo, nodes=nodes, exclusive=opts[b'exclusive']
2757 repo, nodes=nodes, exclusive=opts[b'exclusive']
2777 )
2758 )
2778 )
2759 )
2779 markers.sort(key=lambda x: x._data)
2760 markers.sort(key=lambda x: x._data)
2780 else:
2761 else:
2781 markers = obsutil.getmarkers(repo)
2762 markers = obsutil.getmarkers(repo)
2782
2763
2783 markerstoiter = markers
2764 markerstoiter = markers
2784 isrelevant = lambda m: True
2765 isrelevant = lambda m: True
2785 if opts.get(b'rev') and opts.get(b'index'):
2766 if opts.get(b'rev') and opts.get(b'index'):
2786 markerstoiter = obsutil.getmarkers(repo)
2767 markerstoiter = obsutil.getmarkers(repo)
2787 markerset = set(markers)
2768 markerset = set(markers)
2788 isrelevant = lambda m: m in markerset
2769 isrelevant = lambda m: m in markerset
2789
2770
2790 fm = ui.formatter(b'debugobsolete', opts)
2771 fm = ui.formatter(b'debugobsolete', opts)
2791 for i, m in enumerate(markerstoiter):
2772 for i, m in enumerate(markerstoiter):
2792 if not isrelevant(m):
2773 if not isrelevant(m):
2793 # marker can be irrelevant when we're iterating over a set
2774 # marker can be irrelevant when we're iterating over a set
2794 # of markers (markerstoiter) which is bigger than the set
2775 # of markers (markerstoiter) which is bigger than the set
2795 # of markers we want to display (markers)
2776 # of markers we want to display (markers)
2796 # this can happen if both --index and --rev options are
2777 # this can happen if both --index and --rev options are
2797 # provided and thus we need to iterate over all of the markers
2778 # provided and thus we need to iterate over all of the markers
2798 # to get the correct indices, but only display the ones that
2779 # to get the correct indices, but only display the ones that
2799 # are relevant to --rev value
2780 # are relevant to --rev value
2800 continue
2781 continue
2801 fm.startitem()
2782 fm.startitem()
2802 ind = i if opts.get(b'index') else None
2783 ind = i if opts.get(b'index') else None
2803 cmdutil.showmarker(fm, m, index=ind)
2784 cmdutil.showmarker(fm, m, index=ind)
2804 fm.end()
2785 fm.end()
2805
2786
2806
2787
2807 @command(
2788 @command(
2808 b'debugp1copies',
2789 b'debugp1copies',
2809 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2790 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2810 _(b'[-r REV]'),
2791 _(b'[-r REV]'),
2811 )
2792 )
2812 def debugp1copies(ui, repo, **opts):
2793 def debugp1copies(ui, repo, **opts):
2813 """dump copy information compared to p1"""
2794 """dump copy information compared to p1"""
2814
2795
2815 opts = pycompat.byteskwargs(opts)
2796 opts = pycompat.byteskwargs(opts)
2816 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2797 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2817 for dst, src in ctx.p1copies().items():
2798 for dst, src in ctx.p1copies().items():
2818 ui.write(b'%s -> %s\n' % (src, dst))
2799 ui.write(b'%s -> %s\n' % (src, dst))
2819
2800
2820
2801
2821 @command(
2802 @command(
2822 b'debugp2copies',
2803 b'debugp2copies',
2823 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2804 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2824 _(b'[-r REV]'),
2805 _(b'[-r REV]'),
2825 )
2806 )
2826 def debugp1copies(ui, repo, **opts):
2807 def debugp1copies(ui, repo, **opts):
2827 """dump copy information compared to p2"""
2808 """dump copy information compared to p2"""
2828
2809
2829 opts = pycompat.byteskwargs(opts)
2810 opts = pycompat.byteskwargs(opts)
2830 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2811 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2831 for dst, src in ctx.p2copies().items():
2812 for dst, src in ctx.p2copies().items():
2832 ui.write(b'%s -> %s\n' % (src, dst))
2813 ui.write(b'%s -> %s\n' % (src, dst))
2833
2814
2834
2815
2835 @command(
2816 @command(
2836 b'debugpathcomplete',
2817 b'debugpathcomplete',
2837 [
2818 [
2838 (b'f', b'full', None, _(b'complete an entire path')),
2819 (b'f', b'full', None, _(b'complete an entire path')),
2839 (b'n', b'normal', None, _(b'show only normal files')),
2820 (b'n', b'normal', None, _(b'show only normal files')),
2840 (b'a', b'added', None, _(b'show only added files')),
2821 (b'a', b'added', None, _(b'show only added files')),
2841 (b'r', b'removed', None, _(b'show only removed files')),
2822 (b'r', b'removed', None, _(b'show only removed files')),
2842 ],
2823 ],
2843 _(b'FILESPEC...'),
2824 _(b'FILESPEC...'),
2844 )
2825 )
2845 def debugpathcomplete(ui, repo, *specs, **opts):
2826 def debugpathcomplete(ui, repo, *specs, **opts):
2846 """complete part or all of a tracked path
2827 """complete part or all of a tracked path
2847
2828
2848 This command supports shells that offer path name completion. It
2829 This command supports shells that offer path name completion. It
2849 currently completes only files already known to the dirstate.
2830 currently completes only files already known to the dirstate.
2850
2831
2851 Completion extends only to the next path segment unless
2832 Completion extends only to the next path segment unless
2852 --full is specified, in which case entire paths are used."""
2833 --full is specified, in which case entire paths are used."""
2853
2834
2854 def complete(path, acceptable):
2835 def complete(path, acceptable):
2855 dirstate = repo.dirstate
2836 dirstate = repo.dirstate
2856 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2837 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2857 rootdir = repo.root + pycompat.ossep
2838 rootdir = repo.root + pycompat.ossep
2858 if spec != repo.root and not spec.startswith(rootdir):
2839 if spec != repo.root and not spec.startswith(rootdir):
2859 return [], []
2840 return [], []
2860 if os.path.isdir(spec):
2841 if os.path.isdir(spec):
2861 spec += b'/'
2842 spec += b'/'
2862 spec = spec[len(rootdir) :]
2843 spec = spec[len(rootdir) :]
2863 fixpaths = pycompat.ossep != b'/'
2844 fixpaths = pycompat.ossep != b'/'
2864 if fixpaths:
2845 if fixpaths:
2865 spec = spec.replace(pycompat.ossep, b'/')
2846 spec = spec.replace(pycompat.ossep, b'/')
2866 speclen = len(spec)
2847 speclen = len(spec)
2867 fullpaths = opts['full']
2848 fullpaths = opts['full']
2868 files, dirs = set(), set()
2849 files, dirs = set(), set()
2869 adddir, addfile = dirs.add, files.add
2850 adddir, addfile = dirs.add, files.add
2870 for f, st in dirstate.items():
2851 for f, st in dirstate.items():
2871 if f.startswith(spec) and st.state in acceptable:
2852 if f.startswith(spec) and st.state in acceptable:
2872 if fixpaths:
2853 if fixpaths:
2873 f = f.replace(b'/', pycompat.ossep)
2854 f = f.replace(b'/', pycompat.ossep)
2874 if fullpaths:
2855 if fullpaths:
2875 addfile(f)
2856 addfile(f)
2876 continue
2857 continue
2877 s = f.find(pycompat.ossep, speclen)
2858 s = f.find(pycompat.ossep, speclen)
2878 if s >= 0:
2859 if s >= 0:
2879 adddir(f[:s])
2860 adddir(f[:s])
2880 else:
2861 else:
2881 addfile(f)
2862 addfile(f)
2882 return files, dirs
2863 return files, dirs
2883
2864
2884 acceptable = b''
2865 acceptable = b''
2885 if opts['normal']:
2866 if opts['normal']:
2886 acceptable += b'nm'
2867 acceptable += b'nm'
2887 if opts['added']:
2868 if opts['added']:
2888 acceptable += b'a'
2869 acceptable += b'a'
2889 if opts['removed']:
2870 if opts['removed']:
2890 acceptable += b'r'
2871 acceptable += b'r'
2891 cwd = repo.getcwd()
2872 cwd = repo.getcwd()
2892 if not specs:
2873 if not specs:
2893 specs = [b'.']
2874 specs = [b'.']
2894
2875
2895 files, dirs = set(), set()
2876 files, dirs = set(), set()
2896 for spec in specs:
2877 for spec in specs:
2897 f, d = complete(spec, acceptable or b'nmar')
2878 f, d = complete(spec, acceptable or b'nmar')
2898 files.update(f)
2879 files.update(f)
2899 dirs.update(d)
2880 dirs.update(d)
2900 files.update(dirs)
2881 files.update(dirs)
2901 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2882 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2902 ui.write(b'\n')
2883 ui.write(b'\n')
2903
2884
2904
2885
2905 @command(
2886 @command(
2906 b'debugpathcopies',
2887 b'debugpathcopies',
2907 cmdutil.walkopts,
2888 cmdutil.walkopts,
2908 b'hg debugpathcopies REV1 REV2 [FILE]',
2889 b'hg debugpathcopies REV1 REV2 [FILE]',
2909 inferrepo=True,
2890 inferrepo=True,
2910 )
2891 )
2911 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2892 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2912 """show copies between two revisions"""
2893 """show copies between two revisions"""
2913 ctx1 = scmutil.revsingle(repo, rev1)
2894 ctx1 = scmutil.revsingle(repo, rev1)
2914 ctx2 = scmutil.revsingle(repo, rev2)
2895 ctx2 = scmutil.revsingle(repo, rev2)
2915 m = scmutil.match(ctx1, pats, opts)
2896 m = scmutil.match(ctx1, pats, opts)
2916 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2897 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2917 ui.write(b'%s -> %s\n' % (src, dst))
2898 ui.write(b'%s -> %s\n' % (src, dst))
2918
2899
2919
2900
2920 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2901 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2921 def debugpeer(ui, path):
2902 def debugpeer(ui, path):
2922 """establish a connection to a peer repository"""
2903 """establish a connection to a peer repository"""
2923 # Always enable peer request logging. Requires --debug to display
2904 # Always enable peer request logging. Requires --debug to display
2924 # though.
2905 # though.
2925 overrides = {
2906 overrides = {
2926 (b'devel', b'debug.peer-request'): True,
2907 (b'devel', b'debug.peer-request'): True,
2927 }
2908 }
2928
2909
2929 with ui.configoverride(overrides):
2910 with ui.configoverride(overrides):
2930 peer = hg.peer(ui, {}, path)
2911 peer = hg.peer(ui, {}, path)
2931
2912
2932 try:
2913 try:
2933 local = peer.local() is not None
2914 local = peer.local() is not None
2934 canpush = peer.canpush()
2915 canpush = peer.canpush()
2935
2916
2936 ui.write(_(b'url: %s\n') % peer.url())
2917 ui.write(_(b'url: %s\n') % peer.url())
2937 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2918 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2938 ui.write(
2919 ui.write(
2939 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2920 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2940 )
2921 )
2941 finally:
2922 finally:
2942 peer.close()
2923 peer.close()
2943
2924
2944
2925
2945 @command(
2926 @command(
2946 b'debugpickmergetool',
2927 b'debugpickmergetool',
2947 [
2928 [
2948 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2929 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2949 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2930 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2950 ]
2931 ]
2951 + cmdutil.walkopts
2932 + cmdutil.walkopts
2952 + cmdutil.mergetoolopts,
2933 + cmdutil.mergetoolopts,
2953 _(b'[PATTERN]...'),
2934 _(b'[PATTERN]...'),
2954 inferrepo=True,
2935 inferrepo=True,
2955 )
2936 )
2956 def debugpickmergetool(ui, repo, *pats, **opts):
2937 def debugpickmergetool(ui, repo, *pats, **opts):
2957 """examine which merge tool is chosen for specified file
2938 """examine which merge tool is chosen for specified file
2958
2939
2959 As described in :hg:`help merge-tools`, Mercurial examines
2940 As described in :hg:`help merge-tools`, Mercurial examines
2960 configurations below in this order to decide which merge tool is
2941 configurations below in this order to decide which merge tool is
2961 chosen for specified file.
2942 chosen for specified file.
2962
2943
2963 1. ``--tool`` option
2944 1. ``--tool`` option
2964 2. ``HGMERGE`` environment variable
2945 2. ``HGMERGE`` environment variable
2965 3. configurations in ``merge-patterns`` section
2946 3. configurations in ``merge-patterns`` section
2966 4. configuration of ``ui.merge``
2947 4. configuration of ``ui.merge``
2967 5. configurations in ``merge-tools`` section
2948 5. configurations in ``merge-tools`` section
2968 6. ``hgmerge`` tool (for historical reason only)
2949 6. ``hgmerge`` tool (for historical reason only)
2969 7. default tool for fallback (``:merge`` or ``:prompt``)
2950 7. default tool for fallback (``:merge`` or ``:prompt``)
2970
2951
2971 This command writes out examination result in the style below::
2952 This command writes out examination result in the style below::
2972
2953
2973 FILE = MERGETOOL
2954 FILE = MERGETOOL
2974
2955
2975 By default, all files known in the first parent context of the
2956 By default, all files known in the first parent context of the
2976 working directory are examined. Use file patterns and/or -I/-X
2957 working directory are examined. Use file patterns and/or -I/-X
2977 options to limit target files. -r/--rev is also useful to examine
2958 options to limit target files. -r/--rev is also useful to examine
2978 files in another context without actual updating to it.
2959 files in another context without actual updating to it.
2979
2960
2980 With --debug, this command shows warning messages while matching
2961 With --debug, this command shows warning messages while matching
2981 against ``merge-patterns`` and so on, too. It is recommended to
2962 against ``merge-patterns`` and so on, too. It is recommended to
2982 use this option with explicit file patterns and/or -I/-X options,
2963 use this option with explicit file patterns and/or -I/-X options,
2983 because this option increases amount of output per file according
2964 because this option increases amount of output per file according
2984 to configurations in hgrc.
2965 to configurations in hgrc.
2985
2966
2986 With -v/--verbose, this command shows configurations below at
2967 With -v/--verbose, this command shows configurations below at
2987 first (only if specified).
2968 first (only if specified).
2988
2969
2989 - ``--tool`` option
2970 - ``--tool`` option
2990 - ``HGMERGE`` environment variable
2971 - ``HGMERGE`` environment variable
2991 - configuration of ``ui.merge``
2972 - configuration of ``ui.merge``
2992
2973
2993 If merge tool is chosen before matching against
2974 If merge tool is chosen before matching against
2994 ``merge-patterns``, this command can't show any helpful
2975 ``merge-patterns``, this command can't show any helpful
2995 information, even with --debug. In such case, information above is
2976 information, even with --debug. In such case, information above is
2996 useful to know why a merge tool is chosen.
2977 useful to know why a merge tool is chosen.
2997 """
2978 """
2998 opts = pycompat.byteskwargs(opts)
2979 opts = pycompat.byteskwargs(opts)
2999 overrides = {}
2980 overrides = {}
3000 if opts[b'tool']:
2981 if opts[b'tool']:
3001 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2982 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3002 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2983 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3003
2984
3004 with ui.configoverride(overrides, b'debugmergepatterns'):
2985 with ui.configoverride(overrides, b'debugmergepatterns'):
3005 hgmerge = encoding.environ.get(b"HGMERGE")
2986 hgmerge = encoding.environ.get(b"HGMERGE")
3006 if hgmerge is not None:
2987 if hgmerge is not None:
3007 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2988 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3008 uimerge = ui.config(b"ui", b"merge")
2989 uimerge = ui.config(b"ui", b"merge")
3009 if uimerge:
2990 if uimerge:
3010 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2991 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3011
2992
3012 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2993 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3013 m = scmutil.match(ctx, pats, opts)
2994 m = scmutil.match(ctx, pats, opts)
3014 changedelete = opts[b'changedelete']
2995 changedelete = opts[b'changedelete']
3015 for path in ctx.walk(m):
2996 for path in ctx.walk(m):
3016 fctx = ctx[path]
2997 fctx = ctx[path]
3017 with ui.silent(
2998 with ui.silent(
3018 error=True
2999 error=True
3019 ) if not ui.debugflag else util.nullcontextmanager():
3000 ) if not ui.debugflag else util.nullcontextmanager():
3020 tool, toolpath = filemerge._picktool(
3001 tool, toolpath = filemerge._picktool(
3021 repo,
3002 repo,
3022 ui,
3003 ui,
3023 path,
3004 path,
3024 fctx.isbinary(),
3005 fctx.isbinary(),
3025 b'l' in fctx.flags(),
3006 b'l' in fctx.flags(),
3026 changedelete,
3007 changedelete,
3027 )
3008 )
3028 ui.write(b'%s = %s\n' % (path, tool))
3009 ui.write(b'%s = %s\n' % (path, tool))
3029
3010
3030
3011
3031 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3012 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3032 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3013 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3033 """access the pushkey key/value protocol
3014 """access the pushkey key/value protocol
3034
3015
3035 With two args, list the keys in the given namespace.
3016 With two args, list the keys in the given namespace.
3036
3017
3037 With five args, set a key to new if it currently is set to old.
3018 With five args, set a key to new if it currently is set to old.
3038 Reports success or failure.
3019 Reports success or failure.
3039 """
3020 """
3040
3021
3041 target = hg.peer(ui, {}, repopath)
3022 target = hg.peer(ui, {}, repopath)
3042 try:
3023 try:
3043 if keyinfo:
3024 if keyinfo:
3044 key, old, new = keyinfo
3025 key, old, new = keyinfo
3045 with target.commandexecutor() as e:
3026 with target.commandexecutor() as e:
3046 r = e.callcommand(
3027 r = e.callcommand(
3047 b'pushkey',
3028 b'pushkey',
3048 {
3029 {
3049 b'namespace': namespace,
3030 b'namespace': namespace,
3050 b'key': key,
3031 b'key': key,
3051 b'old': old,
3032 b'old': old,
3052 b'new': new,
3033 b'new': new,
3053 },
3034 },
3054 ).result()
3035 ).result()
3055
3036
3056 ui.status(pycompat.bytestr(r) + b'\n')
3037 ui.status(pycompat.bytestr(r) + b'\n')
3057 return not r
3038 return not r
3058 else:
3039 else:
3059 for k, v in sorted(target.listkeys(namespace).items()):
3040 for k, v in sorted(target.listkeys(namespace).items()):
3060 ui.write(
3041 ui.write(
3061 b"%s\t%s\n"
3042 b"%s\t%s\n"
3062 % (stringutil.escapestr(k), stringutil.escapestr(v))
3043 % (stringutil.escapestr(k), stringutil.escapestr(v))
3063 )
3044 )
3064 finally:
3045 finally:
3065 target.close()
3046 target.close()
3066
3047
3067
3048
3068 @command(b'debugpvec', [], _(b'A B'))
3049 @command(b'debugpvec', [], _(b'A B'))
3069 def debugpvec(ui, repo, a, b=None):
3050 def debugpvec(ui, repo, a, b=None):
3070 ca = scmutil.revsingle(repo, a)
3051 ca = scmutil.revsingle(repo, a)
3071 cb = scmutil.revsingle(repo, b)
3052 cb = scmutil.revsingle(repo, b)
3072 pa = pvec.ctxpvec(ca)
3053 pa = pvec.ctxpvec(ca)
3073 pb = pvec.ctxpvec(cb)
3054 pb = pvec.ctxpvec(cb)
3074 if pa == pb:
3055 if pa == pb:
3075 rel = b"="
3056 rel = b"="
3076 elif pa > pb:
3057 elif pa > pb:
3077 rel = b">"
3058 rel = b">"
3078 elif pa < pb:
3059 elif pa < pb:
3079 rel = b"<"
3060 rel = b"<"
3080 elif pa | pb:
3061 elif pa | pb:
3081 rel = b"|"
3062 rel = b"|"
3082 ui.write(_(b"a: %s\n") % pa)
3063 ui.write(_(b"a: %s\n") % pa)
3083 ui.write(_(b"b: %s\n") % pb)
3064 ui.write(_(b"b: %s\n") % pb)
3084 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3065 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3085 ui.write(
3066 ui.write(
3086 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3067 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3087 % (
3068 % (
3088 abs(pa._depth - pb._depth),
3069 abs(pa._depth - pb._depth),
3089 pvec._hamming(pa._vec, pb._vec),
3070 pvec._hamming(pa._vec, pb._vec),
3090 pa.distance(pb),
3071 pa.distance(pb),
3091 rel,
3072 rel,
3092 )
3073 )
3093 )
3074 )
3094
3075
3095
3076
3096 @command(
3077 @command(
3097 b'debugrebuilddirstate|debugrebuildstate',
3078 b'debugrebuilddirstate|debugrebuildstate',
3098 [
3079 [
3099 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3080 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3100 (
3081 (
3101 b'',
3082 b'',
3102 b'minimal',
3083 b'minimal',
3103 None,
3084 None,
3104 _(
3085 _(
3105 b'only rebuild files that are inconsistent with '
3086 b'only rebuild files that are inconsistent with '
3106 b'the working copy parent'
3087 b'the working copy parent'
3107 ),
3088 ),
3108 ),
3089 ),
3109 ],
3090 ],
3110 _(b'[-r REV]'),
3091 _(b'[-r REV]'),
3111 )
3092 )
3112 def debugrebuilddirstate(ui, repo, rev, **opts):
3093 def debugrebuilddirstate(ui, repo, rev, **opts):
3113 """rebuild the dirstate as it would look like for the given revision
3094 """rebuild the dirstate as it would look like for the given revision
3114
3095
3115 If no revision is specified the first current parent will be used.
3096 If no revision is specified the first current parent will be used.
3116
3097
3117 The dirstate will be set to the files of the given revision.
3098 The dirstate will be set to the files of the given revision.
3118 The actual working directory content or existing dirstate
3099 The actual working directory content or existing dirstate
3119 information such as adds or removes is not considered.
3100 information such as adds or removes is not considered.
3120
3101
3121 ``minimal`` will only rebuild the dirstate status for files that claim to be
3102 ``minimal`` will only rebuild the dirstate status for files that claim to be
3122 tracked but are not in the parent manifest, or that exist in the parent
3103 tracked but are not in the parent manifest, or that exist in the parent
3123 manifest but are not in the dirstate. It will not change adds, removes, or
3104 manifest but are not in the dirstate. It will not change adds, removes, or
3124 modified files that are in the working copy parent.
3105 modified files that are in the working copy parent.
3125
3106
3126 One use of this command is to make the next :hg:`status` invocation
3107 One use of this command is to make the next :hg:`status` invocation
3127 check the actual file content.
3108 check the actual file content.
3128 """
3109 """
3129 ctx = scmutil.revsingle(repo, rev)
3110 ctx = scmutil.revsingle(repo, rev)
3130 with repo.wlock():
3111 with repo.wlock():
3131 dirstate = repo.dirstate
3112 dirstate = repo.dirstate
3132 changedfiles = None
3113 changedfiles = None
3133 # See command doc for what minimal does.
3114 # See command doc for what minimal does.
3134 if opts.get('minimal'):
3115 if opts.get('minimal'):
3135 manifestfiles = set(ctx.manifest().keys())
3116 manifestfiles = set(ctx.manifest().keys())
3136 dirstatefiles = set(dirstate)
3117 dirstatefiles = set(dirstate)
3137 manifestonly = manifestfiles - dirstatefiles
3118 manifestonly = manifestfiles - dirstatefiles
3138 dsonly = dirstatefiles - manifestfiles
3119 dsonly = dirstatefiles - manifestfiles
3139 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3120 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3140 changedfiles = manifestonly | dsnotadded
3121 changedfiles = manifestonly | dsnotadded
3141
3122
3142 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3123 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3143
3124
3144
3125
3145 @command(
3126 @command(
3146 b'debugrebuildfncache',
3127 b'debugrebuildfncache',
3147 [
3128 [
3148 (
3129 (
3149 b'',
3130 b'',
3150 b'only-data',
3131 b'only-data',
3151 False,
3132 False,
3152 _(b'only look for wrong .d files (much faster)'),
3133 _(b'only look for wrong .d files (much faster)'),
3153 )
3134 )
3154 ],
3135 ],
3155 b'',
3136 b'',
3156 )
3137 )
3157 def debugrebuildfncache(ui, repo, **opts):
3138 def debugrebuildfncache(ui, repo, **opts):
3158 """rebuild the fncache file"""
3139 """rebuild the fncache file"""
3159 opts = pycompat.byteskwargs(opts)
3140 opts = pycompat.byteskwargs(opts)
3160 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3141 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3161
3142
3162
3143
3163 @command(
3144 @command(
3164 b'debugrename',
3145 b'debugrename',
3165 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3146 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3166 _(b'[-r REV] [FILE]...'),
3147 _(b'[-r REV] [FILE]...'),
3167 )
3148 )
3168 def debugrename(ui, repo, *pats, **opts):
3149 def debugrename(ui, repo, *pats, **opts):
3169 """dump rename information"""
3150 """dump rename information"""
3170
3151
3171 opts = pycompat.byteskwargs(opts)
3152 opts = pycompat.byteskwargs(opts)
3172 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3153 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3173 m = scmutil.match(ctx, pats, opts)
3154 m = scmutil.match(ctx, pats, opts)
3174 for abs in ctx.walk(m):
3155 for abs in ctx.walk(m):
3175 fctx = ctx[abs]
3156 fctx = ctx[abs]
3176 o = fctx.filelog().renamed(fctx.filenode())
3157 o = fctx.filelog().renamed(fctx.filenode())
3177 rel = repo.pathto(abs)
3158 rel = repo.pathto(abs)
3178 if o:
3159 if o:
3179 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3160 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3180 else:
3161 else:
3181 ui.write(_(b"%s not renamed\n") % rel)
3162 ui.write(_(b"%s not renamed\n") % rel)
3182
3163
3183
3164
3184 @command(b'debugrequires|debugrequirements', [], b'')
3165 @command(b'debugrequires|debugrequirements', [], b'')
3185 def debugrequirements(ui, repo):
3166 def debugrequirements(ui, repo):
3186 """print the current repo requirements"""
3167 """print the current repo requirements"""
3187 for r in sorted(repo.requirements):
3168 for r in sorted(repo.requirements):
3188 ui.write(b"%s\n" % r)
3169 ui.write(b"%s\n" % r)
3189
3170
3190
3171
3191 @command(
3172 @command(
3192 b'debugrevlog',
3173 b'debugrevlog',
3193 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3174 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3194 _(b'-c|-m|FILE'),
3175 _(b'-c|-m|FILE'),
3195 optionalrepo=True,
3176 optionalrepo=True,
3196 )
3177 )
3197 def debugrevlog(ui, repo, file_=None, **opts):
3178 def debugrevlog(ui, repo, file_=None, **opts):
3198 """show data and statistics about a revlog"""
3179 """show data and statistics about a revlog"""
3199 opts = pycompat.byteskwargs(opts)
3180 opts = pycompat.byteskwargs(opts)
3200 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3181 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3201
3182
3202 if opts.get(b"dump"):
3183 if opts.get(b"dump"):
3203 numrevs = len(r)
3184 numrevs = len(r)
3204 ui.write(
3185 ui.write(
3205 (
3186 (
3206 b"# rev p1rev p2rev start end deltastart base p1 p2"
3187 b"# rev p1rev p2rev start end deltastart base p1 p2"
3207 b" rawsize totalsize compression heads chainlen\n"
3188 b" rawsize totalsize compression heads chainlen\n"
3208 )
3189 )
3209 )
3190 )
3210 ts = 0
3191 ts = 0
3211 heads = set()
3192 heads = set()
3212
3193
3213 for rev in pycompat.xrange(numrevs):
3194 for rev in pycompat.xrange(numrevs):
3214 dbase = r.deltaparent(rev)
3195 dbase = r.deltaparent(rev)
3215 if dbase == -1:
3196 if dbase == -1:
3216 dbase = rev
3197 dbase = rev
3217 cbase = r.chainbase(rev)
3198 cbase = r.chainbase(rev)
3218 clen = r.chainlen(rev)
3199 clen = r.chainlen(rev)
3219 p1, p2 = r.parentrevs(rev)
3200 p1, p2 = r.parentrevs(rev)
3220 rs = r.rawsize(rev)
3201 rs = r.rawsize(rev)
3221 ts = ts + rs
3202 ts = ts + rs
3222 heads -= set(r.parentrevs(rev))
3203 heads -= set(r.parentrevs(rev))
3223 heads.add(rev)
3204 heads.add(rev)
3224 try:
3205 try:
3225 compression = ts / r.end(rev)
3206 compression = ts / r.end(rev)
3226 except ZeroDivisionError:
3207 except ZeroDivisionError:
3227 compression = 0
3208 compression = 0
3228 ui.write(
3209 ui.write(
3229 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3210 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3230 b"%11d %5d %8d\n"
3211 b"%11d %5d %8d\n"
3231 % (
3212 % (
3232 rev,
3213 rev,
3233 p1,
3214 p1,
3234 p2,
3215 p2,
3235 r.start(rev),
3216 r.start(rev),
3236 r.end(rev),
3217 r.end(rev),
3237 r.start(dbase),
3218 r.start(dbase),
3238 r.start(cbase),
3219 r.start(cbase),
3239 r.start(p1),
3220 r.start(p1),
3240 r.start(p2),
3221 r.start(p2),
3241 rs,
3222 rs,
3242 ts,
3223 ts,
3243 compression,
3224 compression,
3244 len(heads),
3225 len(heads),
3245 clen,
3226 clen,
3246 )
3227 )
3247 )
3228 )
3248 return 0
3229 return 0
3249
3230
3250 format = r._format_version
3231 format = r._format_version
3251 v = r._format_flags
3232 v = r._format_flags
3252 flags = []
3233 flags = []
3253 gdelta = False
3234 gdelta = False
3254 if v & revlog.FLAG_INLINE_DATA:
3235 if v & revlog.FLAG_INLINE_DATA:
3255 flags.append(b'inline')
3236 flags.append(b'inline')
3256 if v & revlog.FLAG_GENERALDELTA:
3237 if v & revlog.FLAG_GENERALDELTA:
3257 gdelta = True
3238 gdelta = True
3258 flags.append(b'generaldelta')
3239 flags.append(b'generaldelta')
3259 if not flags:
3240 if not flags:
3260 flags = [b'(none)']
3241 flags = [b'(none)']
3261
3242
3262 ### tracks merge vs single parent
3243 ### tracks merge vs single parent
3263 nummerges = 0
3244 nummerges = 0
3264
3245
3265 ### tracks ways the "delta" are build
3246 ### tracks ways the "delta" are build
3266 # nodelta
3247 # nodelta
3267 numempty = 0
3248 numempty = 0
3268 numemptytext = 0
3249 numemptytext = 0
3269 numemptydelta = 0
3250 numemptydelta = 0
3270 # full file content
3251 # full file content
3271 numfull = 0
3252 numfull = 0
3272 # intermediate snapshot against a prior snapshot
3253 # intermediate snapshot against a prior snapshot
3273 numsemi = 0
3254 numsemi = 0
3274 # snapshot count per depth
3255 # snapshot count per depth
3275 numsnapdepth = collections.defaultdict(lambda: 0)
3256 numsnapdepth = collections.defaultdict(lambda: 0)
3276 # delta against previous revision
3257 # delta against previous revision
3277 numprev = 0
3258 numprev = 0
3278 # delta against first or second parent (not prev)
3259 # delta against first or second parent (not prev)
3279 nump1 = 0
3260 nump1 = 0
3280 nump2 = 0
3261 nump2 = 0
3281 # delta against neither prev nor parents
3262 # delta against neither prev nor parents
3282 numother = 0
3263 numother = 0
3283 # delta against prev that are also first or second parent
3264 # delta against prev that are also first or second parent
3284 # (details of `numprev`)
3265 # (details of `numprev`)
3285 nump1prev = 0
3266 nump1prev = 0
3286 nump2prev = 0
3267 nump2prev = 0
3287
3268
3288 # data about delta chain of each revs
3269 # data about delta chain of each revs
3289 chainlengths = []
3270 chainlengths = []
3290 chainbases = []
3271 chainbases = []
3291 chainspans = []
3272 chainspans = []
3292
3273
3293 # data about each revision
3274 # data about each revision
3294 datasize = [None, 0, 0]
3275 datasize = [None, 0, 0]
3295 fullsize = [None, 0, 0]
3276 fullsize = [None, 0, 0]
3296 semisize = [None, 0, 0]
3277 semisize = [None, 0, 0]
3297 # snapshot count per depth
3278 # snapshot count per depth
3298 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3279 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3299 deltasize = [None, 0, 0]
3280 deltasize = [None, 0, 0]
3300 chunktypecounts = {}
3281 chunktypecounts = {}
3301 chunktypesizes = {}
3282 chunktypesizes = {}
3302
3283
3303 def addsize(size, l):
3284 def addsize(size, l):
3304 if l[0] is None or size < l[0]:
3285 if l[0] is None or size < l[0]:
3305 l[0] = size
3286 l[0] = size
3306 if size > l[1]:
3287 if size > l[1]:
3307 l[1] = size
3288 l[1] = size
3308 l[2] += size
3289 l[2] += size
3309
3290
3310 numrevs = len(r)
3291 numrevs = len(r)
3311 for rev in pycompat.xrange(numrevs):
3292 for rev in pycompat.xrange(numrevs):
3312 p1, p2 = r.parentrevs(rev)
3293 p1, p2 = r.parentrevs(rev)
3313 delta = r.deltaparent(rev)
3294 delta = r.deltaparent(rev)
3314 if format > 0:
3295 if format > 0:
3315 addsize(r.rawsize(rev), datasize)
3296 addsize(r.rawsize(rev), datasize)
3316 if p2 != nullrev:
3297 if p2 != nullrev:
3317 nummerges += 1
3298 nummerges += 1
3318 size = r.length(rev)
3299 size = r.length(rev)
3319 if delta == nullrev:
3300 if delta == nullrev:
3320 chainlengths.append(0)
3301 chainlengths.append(0)
3321 chainbases.append(r.start(rev))
3302 chainbases.append(r.start(rev))
3322 chainspans.append(size)
3303 chainspans.append(size)
3323 if size == 0:
3304 if size == 0:
3324 numempty += 1
3305 numempty += 1
3325 numemptytext += 1
3306 numemptytext += 1
3326 else:
3307 else:
3327 numfull += 1
3308 numfull += 1
3328 numsnapdepth[0] += 1
3309 numsnapdepth[0] += 1
3329 addsize(size, fullsize)
3310 addsize(size, fullsize)
3330 addsize(size, snapsizedepth[0])
3311 addsize(size, snapsizedepth[0])
3331 else:
3312 else:
3332 chainlengths.append(chainlengths[delta] + 1)
3313 chainlengths.append(chainlengths[delta] + 1)
3333 baseaddr = chainbases[delta]
3314 baseaddr = chainbases[delta]
3334 revaddr = r.start(rev)
3315 revaddr = r.start(rev)
3335 chainbases.append(baseaddr)
3316 chainbases.append(baseaddr)
3336 chainspans.append((revaddr - baseaddr) + size)
3317 chainspans.append((revaddr - baseaddr) + size)
3337 if size == 0:
3318 if size == 0:
3338 numempty += 1
3319 numempty += 1
3339 numemptydelta += 1
3320 numemptydelta += 1
3340 elif r.issnapshot(rev):
3321 elif r.issnapshot(rev):
3341 addsize(size, semisize)
3322 addsize(size, semisize)
3342 numsemi += 1
3323 numsemi += 1
3343 depth = r.snapshotdepth(rev)
3324 depth = r.snapshotdepth(rev)
3344 numsnapdepth[depth] += 1
3325 numsnapdepth[depth] += 1
3345 addsize(size, snapsizedepth[depth])
3326 addsize(size, snapsizedepth[depth])
3346 else:
3327 else:
3347 addsize(size, deltasize)
3328 addsize(size, deltasize)
3348 if delta == rev - 1:
3329 if delta == rev - 1:
3349 numprev += 1
3330 numprev += 1
3350 if delta == p1:
3331 if delta == p1:
3351 nump1prev += 1
3332 nump1prev += 1
3352 elif delta == p2:
3333 elif delta == p2:
3353 nump2prev += 1
3334 nump2prev += 1
3354 elif delta == p1:
3335 elif delta == p1:
3355 nump1 += 1
3336 nump1 += 1
3356 elif delta == p2:
3337 elif delta == p2:
3357 nump2 += 1
3338 nump2 += 1
3358 elif delta != nullrev:
3339 elif delta != nullrev:
3359 numother += 1
3340 numother += 1
3360
3341
3361 # Obtain data on the raw chunks in the revlog.
3342 # Obtain data on the raw chunks in the revlog.
3362 if util.safehasattr(r, b'_getsegmentforrevs'):
3343 if util.safehasattr(r, b'_getsegmentforrevs'):
3363 segment = r._getsegmentforrevs(rev, rev)[1]
3344 segment = r._getsegmentforrevs(rev, rev)[1]
3364 else:
3345 else:
3365 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3346 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3366 if segment:
3347 if segment:
3367 chunktype = bytes(segment[0:1])
3348 chunktype = bytes(segment[0:1])
3368 else:
3349 else:
3369 chunktype = b'empty'
3350 chunktype = b'empty'
3370
3351
3371 if chunktype not in chunktypecounts:
3352 if chunktype not in chunktypecounts:
3372 chunktypecounts[chunktype] = 0
3353 chunktypecounts[chunktype] = 0
3373 chunktypesizes[chunktype] = 0
3354 chunktypesizes[chunktype] = 0
3374
3355
3375 chunktypecounts[chunktype] += 1
3356 chunktypecounts[chunktype] += 1
3376 chunktypesizes[chunktype] += size
3357 chunktypesizes[chunktype] += size
3377
3358
3378 # Adjust size min value for empty cases
3359 # Adjust size min value for empty cases
3379 for size in (datasize, fullsize, semisize, deltasize):
3360 for size in (datasize, fullsize, semisize, deltasize):
3380 if size[0] is None:
3361 if size[0] is None:
3381 size[0] = 0
3362 size[0] = 0
3382
3363
3383 numdeltas = numrevs - numfull - numempty - numsemi
3364 numdeltas = numrevs - numfull - numempty - numsemi
3384 numoprev = numprev - nump1prev - nump2prev
3365 numoprev = numprev - nump1prev - nump2prev
3385 totalrawsize = datasize[2]
3366 totalrawsize = datasize[2]
3386 datasize[2] /= numrevs
3367 datasize[2] /= numrevs
3387 fulltotal = fullsize[2]
3368 fulltotal = fullsize[2]
3388 if numfull == 0:
3369 if numfull == 0:
3389 fullsize[2] = 0
3370 fullsize[2] = 0
3390 else:
3371 else:
3391 fullsize[2] /= numfull
3372 fullsize[2] /= numfull
3392 semitotal = semisize[2]
3373 semitotal = semisize[2]
3393 snaptotal = {}
3374 snaptotal = {}
3394 if numsemi > 0:
3375 if numsemi > 0:
3395 semisize[2] /= numsemi
3376 semisize[2] /= numsemi
3396 for depth in snapsizedepth:
3377 for depth in snapsizedepth:
3397 snaptotal[depth] = snapsizedepth[depth][2]
3378 snaptotal[depth] = snapsizedepth[depth][2]
3398 snapsizedepth[depth][2] /= numsnapdepth[depth]
3379 snapsizedepth[depth][2] /= numsnapdepth[depth]
3399
3380
3400 deltatotal = deltasize[2]
3381 deltatotal = deltasize[2]
3401 if numdeltas > 0:
3382 if numdeltas > 0:
3402 deltasize[2] /= numdeltas
3383 deltasize[2] /= numdeltas
3403 totalsize = fulltotal + semitotal + deltatotal
3384 totalsize = fulltotal + semitotal + deltatotal
3404 avgchainlen = sum(chainlengths) / numrevs
3385 avgchainlen = sum(chainlengths) / numrevs
3405 maxchainlen = max(chainlengths)
3386 maxchainlen = max(chainlengths)
3406 maxchainspan = max(chainspans)
3387 maxchainspan = max(chainspans)
3407 compratio = 1
3388 compratio = 1
3408 if totalsize:
3389 if totalsize:
3409 compratio = totalrawsize / totalsize
3390 compratio = totalrawsize / totalsize
3410
3391
3411 basedfmtstr = b'%%%dd\n'
3392 basedfmtstr = b'%%%dd\n'
3412 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3393 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3413
3394
3414 def dfmtstr(max):
3395 def dfmtstr(max):
3415 return basedfmtstr % len(str(max))
3396 return basedfmtstr % len(str(max))
3416
3397
3417 def pcfmtstr(max, padding=0):
3398 def pcfmtstr(max, padding=0):
3418 return basepcfmtstr % (len(str(max)), b' ' * padding)
3399 return basepcfmtstr % (len(str(max)), b' ' * padding)
3419
3400
3420 def pcfmt(value, total):
3401 def pcfmt(value, total):
3421 if total:
3402 if total:
3422 return (value, 100 * float(value) / total)
3403 return (value, 100 * float(value) / total)
3423 else:
3404 else:
3424 return value, 100.0
3405 return value, 100.0
3425
3406
3426 ui.writenoi18n(b'format : %d\n' % format)
3407 ui.writenoi18n(b'format : %d\n' % format)
3427 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3408 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3428
3409
3429 ui.write(b'\n')
3410 ui.write(b'\n')
3430 fmt = pcfmtstr(totalsize)
3411 fmt = pcfmtstr(totalsize)
3431 fmt2 = dfmtstr(totalsize)
3412 fmt2 = dfmtstr(totalsize)
3432 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3413 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3433 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3414 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3434 ui.writenoi18n(
3415 ui.writenoi18n(
3435 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3416 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3436 )
3417 )
3437 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3418 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3438 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3419 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3439 ui.writenoi18n(
3420 ui.writenoi18n(
3440 b' text : '
3421 b' text : '
3441 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3422 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3442 )
3423 )
3443 ui.writenoi18n(
3424 ui.writenoi18n(
3444 b' delta : '
3425 b' delta : '
3445 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3426 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3446 )
3427 )
3447 ui.writenoi18n(
3428 ui.writenoi18n(
3448 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3429 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3449 )
3430 )
3450 for depth in sorted(numsnapdepth):
3431 for depth in sorted(numsnapdepth):
3451 ui.write(
3432 ui.write(
3452 (b' lvl-%-3d : ' % depth)
3433 (b' lvl-%-3d : ' % depth)
3453 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3434 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3454 )
3435 )
3455 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3436 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3456 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3437 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3457 ui.writenoi18n(
3438 ui.writenoi18n(
3458 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3439 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3459 )
3440 )
3460 for depth in sorted(numsnapdepth):
3441 for depth in sorted(numsnapdepth):
3461 ui.write(
3442 ui.write(
3462 (b' lvl-%-3d : ' % depth)
3443 (b' lvl-%-3d : ' % depth)
3463 + fmt % pcfmt(snaptotal[depth], totalsize)
3444 + fmt % pcfmt(snaptotal[depth], totalsize)
3464 )
3445 )
3465 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3446 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3466
3447
3467 def fmtchunktype(chunktype):
3448 def fmtchunktype(chunktype):
3468 if chunktype == b'empty':
3449 if chunktype == b'empty':
3469 return b' %s : ' % chunktype
3450 return b' %s : ' % chunktype
3470 elif chunktype in pycompat.bytestr(string.ascii_letters):
3451 elif chunktype in pycompat.bytestr(string.ascii_letters):
3471 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3452 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3472 else:
3453 else:
3473 return b' 0x%s : ' % hex(chunktype)
3454 return b' 0x%s : ' % hex(chunktype)
3474
3455
3475 ui.write(b'\n')
3456 ui.write(b'\n')
3476 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3457 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3477 for chunktype in sorted(chunktypecounts):
3458 for chunktype in sorted(chunktypecounts):
3478 ui.write(fmtchunktype(chunktype))
3459 ui.write(fmtchunktype(chunktype))
3479 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3460 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3480 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3461 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3481 for chunktype in sorted(chunktypecounts):
3462 for chunktype in sorted(chunktypecounts):
3482 ui.write(fmtchunktype(chunktype))
3463 ui.write(fmtchunktype(chunktype))
3483 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3464 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3484
3465
3485 ui.write(b'\n')
3466 ui.write(b'\n')
3486 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3467 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3487 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3468 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3488 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3469 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3489 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3470 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3490 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3471 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3491
3472
3492 if format > 0:
3473 if format > 0:
3493 ui.write(b'\n')
3474 ui.write(b'\n')
3494 ui.writenoi18n(
3475 ui.writenoi18n(
3495 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3476 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3496 % tuple(datasize)
3477 % tuple(datasize)
3497 )
3478 )
3498 ui.writenoi18n(
3479 ui.writenoi18n(
3499 b'full revision size (min/max/avg) : %d / %d / %d\n'
3480 b'full revision size (min/max/avg) : %d / %d / %d\n'
3500 % tuple(fullsize)
3481 % tuple(fullsize)
3501 )
3482 )
3502 ui.writenoi18n(
3483 ui.writenoi18n(
3503 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3484 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3504 % tuple(semisize)
3485 % tuple(semisize)
3505 )
3486 )
3506 for depth in sorted(snapsizedepth):
3487 for depth in sorted(snapsizedepth):
3507 if depth == 0:
3488 if depth == 0:
3508 continue
3489 continue
3509 ui.writenoi18n(
3490 ui.writenoi18n(
3510 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3491 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3511 % ((depth,) + tuple(snapsizedepth[depth]))
3492 % ((depth,) + tuple(snapsizedepth[depth]))
3512 )
3493 )
3513 ui.writenoi18n(
3494 ui.writenoi18n(
3514 b'delta size (min/max/avg) : %d / %d / %d\n'
3495 b'delta size (min/max/avg) : %d / %d / %d\n'
3515 % tuple(deltasize)
3496 % tuple(deltasize)
3516 )
3497 )
3517
3498
3518 if numdeltas > 0:
3499 if numdeltas > 0:
3519 ui.write(b'\n')
3500 ui.write(b'\n')
3520 fmt = pcfmtstr(numdeltas)
3501 fmt = pcfmtstr(numdeltas)
3521 fmt2 = pcfmtstr(numdeltas, 4)
3502 fmt2 = pcfmtstr(numdeltas, 4)
3522 ui.writenoi18n(
3503 ui.writenoi18n(
3523 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3504 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3524 )
3505 )
3525 if numprev > 0:
3506 if numprev > 0:
3526 ui.writenoi18n(
3507 ui.writenoi18n(
3527 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3508 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3528 )
3509 )
3529 ui.writenoi18n(
3510 ui.writenoi18n(
3530 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3511 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3531 )
3512 )
3532 ui.writenoi18n(
3513 ui.writenoi18n(
3533 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3514 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3534 )
3515 )
3535 if gdelta:
3516 if gdelta:
3536 ui.writenoi18n(
3517 ui.writenoi18n(
3537 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3518 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3538 )
3519 )
3539 ui.writenoi18n(
3520 ui.writenoi18n(
3540 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3521 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3541 )
3522 )
3542 ui.writenoi18n(
3523 ui.writenoi18n(
3543 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3524 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3544 )
3525 )
3545
3526
3546
3527
3547 @command(
3528 @command(
3548 b'debugrevlogindex',
3529 b'debugrevlogindex',
3549 cmdutil.debugrevlogopts
3530 cmdutil.debugrevlogopts
3550 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3531 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3551 _(b'[-f FORMAT] -c|-m|FILE'),
3532 _(b'[-f FORMAT] -c|-m|FILE'),
3552 optionalrepo=True,
3533 optionalrepo=True,
3553 )
3534 )
3554 def debugrevlogindex(ui, repo, file_=None, **opts):
3535 def debugrevlogindex(ui, repo, file_=None, **opts):
3555 """dump the contents of a revlog index"""
3536 """dump the contents of a revlog index"""
3556 opts = pycompat.byteskwargs(opts)
3537 opts = pycompat.byteskwargs(opts)
3557 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3538 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3558 format = opts.get(b'format', 0)
3539 format = opts.get(b'format', 0)
3559 if format not in (0, 1):
3540 if format not in (0, 1):
3560 raise error.Abort(_(b"unknown format %d") % format)
3541 raise error.Abort(_(b"unknown format %d") % format)
3561
3542
3562 if ui.debugflag:
3543 if ui.debugflag:
3563 shortfn = hex
3544 shortfn = hex
3564 else:
3545 else:
3565 shortfn = short
3546 shortfn = short
3566
3547
3567 # There might not be anything in r, so have a sane default
3548 # There might not be anything in r, so have a sane default
3568 idlen = 12
3549 idlen = 12
3569 for i in r:
3550 for i in r:
3570 idlen = len(shortfn(r.node(i)))
3551 idlen = len(shortfn(r.node(i)))
3571 break
3552 break
3572
3553
3573 if format == 0:
3554 if format == 0:
3574 if ui.verbose:
3555 if ui.verbose:
3575 ui.writenoi18n(
3556 ui.writenoi18n(
3576 b" rev offset length linkrev %s %s p2\n"
3557 b" rev offset length linkrev %s %s p2\n"
3577 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3558 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3578 )
3559 )
3579 else:
3560 else:
3580 ui.writenoi18n(
3561 ui.writenoi18n(
3581 b" rev linkrev %s %s p2\n"
3562 b" rev linkrev %s %s p2\n"
3582 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3563 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3583 )
3564 )
3584 elif format == 1:
3565 elif format == 1:
3585 if ui.verbose:
3566 if ui.verbose:
3586 ui.writenoi18n(
3567 ui.writenoi18n(
3587 (
3568 (
3588 b" rev flag offset length size link p1"
3569 b" rev flag offset length size link p1"
3589 b" p2 %s\n"
3570 b" p2 %s\n"
3590 )
3571 )
3591 % b"nodeid".rjust(idlen)
3572 % b"nodeid".rjust(idlen)
3592 )
3573 )
3593 else:
3574 else:
3594 ui.writenoi18n(
3575 ui.writenoi18n(
3595 b" rev flag size link p1 p2 %s\n"
3576 b" rev flag size link p1 p2 %s\n"
3596 % b"nodeid".rjust(idlen)
3577 % b"nodeid".rjust(idlen)
3597 )
3578 )
3598
3579
3599 for i in r:
3580 for i in r:
3600 node = r.node(i)
3581 node = r.node(i)
3601 if format == 0:
3582 if format == 0:
3602 try:
3583 try:
3603 pp = r.parents(node)
3584 pp = r.parents(node)
3604 except Exception:
3585 except Exception:
3605 pp = [repo.nullid, repo.nullid]
3586 pp = [repo.nullid, repo.nullid]
3606 if ui.verbose:
3587 if ui.verbose:
3607 ui.write(
3588 ui.write(
3608 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3589 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3609 % (
3590 % (
3610 i,
3591 i,
3611 r.start(i),
3592 r.start(i),
3612 r.length(i),
3593 r.length(i),
3613 r.linkrev(i),
3594 r.linkrev(i),
3614 shortfn(node),
3595 shortfn(node),
3615 shortfn(pp[0]),
3596 shortfn(pp[0]),
3616 shortfn(pp[1]),
3597 shortfn(pp[1]),
3617 )
3598 )
3618 )
3599 )
3619 else:
3600 else:
3620 ui.write(
3601 ui.write(
3621 b"% 6d % 7d %s %s %s\n"
3602 b"% 6d % 7d %s %s %s\n"
3622 % (
3603 % (
3623 i,
3604 i,
3624 r.linkrev(i),
3605 r.linkrev(i),
3625 shortfn(node),
3606 shortfn(node),
3626 shortfn(pp[0]),
3607 shortfn(pp[0]),
3627 shortfn(pp[1]),
3608 shortfn(pp[1]),
3628 )
3609 )
3629 )
3610 )
3630 elif format == 1:
3611 elif format == 1:
3631 pr = r.parentrevs(i)
3612 pr = r.parentrevs(i)
3632 if ui.verbose:
3613 if ui.verbose:
3633 ui.write(
3614 ui.write(
3634 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3615 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3635 % (
3616 % (
3636 i,
3617 i,
3637 r.flags(i),
3618 r.flags(i),
3638 r.start(i),
3619 r.start(i),
3639 r.length(i),
3620 r.length(i),
3640 r.rawsize(i),
3621 r.rawsize(i),
3641 r.linkrev(i),
3622 r.linkrev(i),
3642 pr[0],
3623 pr[0],
3643 pr[1],
3624 pr[1],
3644 shortfn(node),
3625 shortfn(node),
3645 )
3626 )
3646 )
3627 )
3647 else:
3628 else:
3648 ui.write(
3629 ui.write(
3649 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3630 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3650 % (
3631 % (
3651 i,
3632 i,
3652 r.flags(i),
3633 r.flags(i),
3653 r.rawsize(i),
3634 r.rawsize(i),
3654 r.linkrev(i),
3635 r.linkrev(i),
3655 pr[0],
3636 pr[0],
3656 pr[1],
3637 pr[1],
3657 shortfn(node),
3638 shortfn(node),
3658 )
3639 )
3659 )
3640 )
3660
3641
3661
3642
3662 @command(
3643 @command(
3663 b'debugrevspec',
3644 b'debugrevspec',
3664 [
3645 [
3665 (
3646 (
3666 b'',
3647 b'',
3667 b'optimize',
3648 b'optimize',
3668 None,
3649 None,
3669 _(b'print parsed tree after optimizing (DEPRECATED)'),
3650 _(b'print parsed tree after optimizing (DEPRECATED)'),
3670 ),
3651 ),
3671 (
3652 (
3672 b'',
3653 b'',
3673 b'show-revs',
3654 b'show-revs',
3674 True,
3655 True,
3675 _(b'print list of result revisions (default)'),
3656 _(b'print list of result revisions (default)'),
3676 ),
3657 ),
3677 (
3658 (
3678 b's',
3659 b's',
3679 b'show-set',
3660 b'show-set',
3680 None,
3661 None,
3681 _(b'print internal representation of result set'),
3662 _(b'print internal representation of result set'),
3682 ),
3663 ),
3683 (
3664 (
3684 b'p',
3665 b'p',
3685 b'show-stage',
3666 b'show-stage',
3686 [],
3667 [],
3687 _(b'print parsed tree at the given stage'),
3668 _(b'print parsed tree at the given stage'),
3688 _(b'NAME'),
3669 _(b'NAME'),
3689 ),
3670 ),
3690 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3671 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3691 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3672 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3692 ],
3673 ],
3693 b'REVSPEC',
3674 b'REVSPEC',
3694 )
3675 )
3695 def debugrevspec(ui, repo, expr, **opts):
3676 def debugrevspec(ui, repo, expr, **opts):
3696 """parse and apply a revision specification
3677 """parse and apply a revision specification
3697
3678
3698 Use -p/--show-stage option to print the parsed tree at the given stages.
3679 Use -p/--show-stage option to print the parsed tree at the given stages.
3699 Use -p all to print tree at every stage.
3680 Use -p all to print tree at every stage.
3700
3681
3701 Use --no-show-revs option with -s or -p to print only the set
3682 Use --no-show-revs option with -s or -p to print only the set
3702 representation or the parsed tree respectively.
3683 representation or the parsed tree respectively.
3703
3684
3704 Use --verify-optimized to compare the optimized result with the unoptimized
3685 Use --verify-optimized to compare the optimized result with the unoptimized
3705 one. Returns 1 if the optimized result differs.
3686 one. Returns 1 if the optimized result differs.
3706 """
3687 """
3707 opts = pycompat.byteskwargs(opts)
3688 opts = pycompat.byteskwargs(opts)
3708 aliases = ui.configitems(b'revsetalias')
3689 aliases = ui.configitems(b'revsetalias')
3709 stages = [
3690 stages = [
3710 (b'parsed', lambda tree: tree),
3691 (b'parsed', lambda tree: tree),
3711 (
3692 (
3712 b'expanded',
3693 b'expanded',
3713 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3694 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3714 ),
3695 ),
3715 (b'concatenated', revsetlang.foldconcat),
3696 (b'concatenated', revsetlang.foldconcat),
3716 (b'analyzed', revsetlang.analyze),
3697 (b'analyzed', revsetlang.analyze),
3717 (b'optimized', revsetlang.optimize),
3698 (b'optimized', revsetlang.optimize),
3718 ]
3699 ]
3719 if opts[b'no_optimized']:
3700 if opts[b'no_optimized']:
3720 stages = stages[:-1]
3701 stages = stages[:-1]
3721 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3702 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3722 raise error.Abort(
3703 raise error.Abort(
3723 _(b'cannot use --verify-optimized with --no-optimized')
3704 _(b'cannot use --verify-optimized with --no-optimized')
3724 )
3705 )
3725 stagenames = {n for n, f in stages}
3706 stagenames = {n for n, f in stages}
3726
3707
3727 showalways = set()
3708 showalways = set()
3728 showchanged = set()
3709 showchanged = set()
3729 if ui.verbose and not opts[b'show_stage']:
3710 if ui.verbose and not opts[b'show_stage']:
3730 # show parsed tree by --verbose (deprecated)
3711 # show parsed tree by --verbose (deprecated)
3731 showalways.add(b'parsed')
3712 showalways.add(b'parsed')
3732 showchanged.update([b'expanded', b'concatenated'])
3713 showchanged.update([b'expanded', b'concatenated'])
3733 if opts[b'optimize']:
3714 if opts[b'optimize']:
3734 showalways.add(b'optimized')
3715 showalways.add(b'optimized')
3735 if opts[b'show_stage'] and opts[b'optimize']:
3716 if opts[b'show_stage'] and opts[b'optimize']:
3736 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3717 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3737 if opts[b'show_stage'] == [b'all']:
3718 if opts[b'show_stage'] == [b'all']:
3738 showalways.update(stagenames)
3719 showalways.update(stagenames)
3739 else:
3720 else:
3740 for n in opts[b'show_stage']:
3721 for n in opts[b'show_stage']:
3741 if n not in stagenames:
3722 if n not in stagenames:
3742 raise error.Abort(_(b'invalid stage name: %s') % n)
3723 raise error.Abort(_(b'invalid stage name: %s') % n)
3743 showalways.update(opts[b'show_stage'])
3724 showalways.update(opts[b'show_stage'])
3744
3725
3745 treebystage = {}
3726 treebystage = {}
3746 printedtree = None
3727 printedtree = None
3747 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3728 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3748 for n, f in stages:
3729 for n, f in stages:
3749 treebystage[n] = tree = f(tree)
3730 treebystage[n] = tree = f(tree)
3750 if n in showalways or (n in showchanged and tree != printedtree):
3731 if n in showalways or (n in showchanged and tree != printedtree):
3751 if opts[b'show_stage'] or n != b'parsed':
3732 if opts[b'show_stage'] or n != b'parsed':
3752 ui.write(b"* %s:\n" % n)
3733 ui.write(b"* %s:\n" % n)
3753 ui.write(revsetlang.prettyformat(tree), b"\n")
3734 ui.write(revsetlang.prettyformat(tree), b"\n")
3754 printedtree = tree
3735 printedtree = tree
3755
3736
3756 if opts[b'verify_optimized']:
3737 if opts[b'verify_optimized']:
3757 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3738 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3758 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3739 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3759 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3740 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3760 ui.writenoi18n(
3741 ui.writenoi18n(
3761 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3742 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3762 )
3743 )
3763 ui.writenoi18n(
3744 ui.writenoi18n(
3764 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3745 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3765 )
3746 )
3766 arevs = list(arevs)
3747 arevs = list(arevs)
3767 brevs = list(brevs)
3748 brevs = list(brevs)
3768 if arevs == brevs:
3749 if arevs == brevs:
3769 return 0
3750 return 0
3770 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3751 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3771 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3752 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3772 sm = difflib.SequenceMatcher(None, arevs, brevs)
3753 sm = difflib.SequenceMatcher(None, arevs, brevs)
3773 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3754 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3774 if tag in ('delete', 'replace'):
3755 if tag in ('delete', 'replace'):
3775 for c in arevs[alo:ahi]:
3756 for c in arevs[alo:ahi]:
3776 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3757 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3777 if tag in ('insert', 'replace'):
3758 if tag in ('insert', 'replace'):
3778 for c in brevs[blo:bhi]:
3759 for c in brevs[blo:bhi]:
3779 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3760 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3780 if tag == 'equal':
3761 if tag == 'equal':
3781 for c in arevs[alo:ahi]:
3762 for c in arevs[alo:ahi]:
3782 ui.write(b' %d\n' % c)
3763 ui.write(b' %d\n' % c)
3783 return 1
3764 return 1
3784
3765
3785 func = revset.makematcher(tree)
3766 func = revset.makematcher(tree)
3786 revs = func(repo)
3767 revs = func(repo)
3787 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3768 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3788 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3769 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3789 if not opts[b'show_revs']:
3770 if not opts[b'show_revs']:
3790 return
3771 return
3791 for c in revs:
3772 for c in revs:
3792 ui.write(b"%d\n" % c)
3773 ui.write(b"%d\n" % c)
3793
3774
3794
3775
3795 @command(
3776 @command(
3796 b'debugserve',
3777 b'debugserve',
3797 [
3778 [
3798 (
3779 (
3799 b'',
3780 b'',
3800 b'sshstdio',
3781 b'sshstdio',
3801 False,
3782 False,
3802 _(b'run an SSH server bound to process handles'),
3783 _(b'run an SSH server bound to process handles'),
3803 ),
3784 ),
3804 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3785 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3805 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3786 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3806 ],
3787 ],
3807 b'',
3788 b'',
3808 )
3789 )
3809 def debugserve(ui, repo, **opts):
3790 def debugserve(ui, repo, **opts):
3810 """run a server with advanced settings
3791 """run a server with advanced settings
3811
3792
3812 This command is similar to :hg:`serve`. It exists partially as a
3793 This command is similar to :hg:`serve`. It exists partially as a
3813 workaround to the fact that ``hg serve --stdio`` must have specific
3794 workaround to the fact that ``hg serve --stdio`` must have specific
3814 arguments for security reasons.
3795 arguments for security reasons.
3815 """
3796 """
3816 opts = pycompat.byteskwargs(opts)
3797 opts = pycompat.byteskwargs(opts)
3817
3798
3818 if not opts[b'sshstdio']:
3799 if not opts[b'sshstdio']:
3819 raise error.Abort(_(b'only --sshstdio is currently supported'))
3800 raise error.Abort(_(b'only --sshstdio is currently supported'))
3820
3801
3821 logfh = None
3802 logfh = None
3822
3803
3823 if opts[b'logiofd'] and opts[b'logiofile']:
3804 if opts[b'logiofd'] and opts[b'logiofile']:
3824 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3805 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3825
3806
3826 if opts[b'logiofd']:
3807 if opts[b'logiofd']:
3827 # Ideally we would be line buffered. But line buffering in binary
3808 # Ideally we would be line buffered. But line buffering in binary
3828 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3809 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3829 # buffering could have performance impacts. But since this isn't
3810 # buffering could have performance impacts. But since this isn't
3830 # performance critical code, it should be fine.
3811 # performance critical code, it should be fine.
3831 try:
3812 try:
3832 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3813 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3833 except OSError as e:
3814 except OSError as e:
3834 if e.errno != errno.ESPIPE:
3815 if e.errno != errno.ESPIPE:
3835 raise
3816 raise
3836 # can't seek a pipe, so `ab` mode fails on py3
3817 # can't seek a pipe, so `ab` mode fails on py3
3837 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3818 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3838 elif opts[b'logiofile']:
3819 elif opts[b'logiofile']:
3839 logfh = open(opts[b'logiofile'], b'ab', 0)
3820 logfh = open(opts[b'logiofile'], b'ab', 0)
3840
3821
3841 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3822 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3842 s.serve_forever()
3823 s.serve_forever()
3843
3824
3844
3825
3845 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3826 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3846 def debugsetparents(ui, repo, rev1, rev2=None):
3827 def debugsetparents(ui, repo, rev1, rev2=None):
3847 """manually set the parents of the current working directory (DANGEROUS)
3828 """manually set the parents of the current working directory (DANGEROUS)
3848
3829
3849 This command is not what you are looking for and should not be used. Using
3830 This command is not what you are looking for and should not be used. Using
3850 this command will most certainly results in slight corruption of the file
3831 this command will most certainly results in slight corruption of the file
3851 level histories withing your repository. DO NOT USE THIS COMMAND.
3832 level histories withing your repository. DO NOT USE THIS COMMAND.
3852
3833
3853 The command update the p1 and p2 field in the dirstate, and not touching
3834 The command update the p1 and p2 field in the dirstate, and not touching
3854 anything else. This useful for writing repository conversion tools, but
3835 anything else. This useful for writing repository conversion tools, but
3855 should be used with extreme care. For example, neither the working
3836 should be used with extreme care. For example, neither the working
3856 directory nor the dirstate is updated, so file status may be incorrect
3837 directory nor the dirstate is updated, so file status may be incorrect
3857 after running this command. Only used if you are one of the few people that
3838 after running this command. Only used if you are one of the few people that
3858 deeply unstand both conversion tools and file level histories. If you are
3839 deeply unstand both conversion tools and file level histories. If you are
3859 reading this help, you are not one of this people (most of them sailed west
3840 reading this help, you are not one of this people (most of them sailed west
3860 from Mithlond anyway.
3841 from Mithlond anyway.
3861
3842
3862 So one last time DO NOT USE THIS COMMAND.
3843 So one last time DO NOT USE THIS COMMAND.
3863
3844
3864 Returns 0 on success.
3845 Returns 0 on success.
3865 """
3846 """
3866
3847
3867 node1 = scmutil.revsingle(repo, rev1).node()
3848 node1 = scmutil.revsingle(repo, rev1).node()
3868 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3849 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3869
3850
3870 with repo.wlock():
3851 with repo.wlock():
3871 repo.setparents(node1, node2)
3852 repo.setparents(node1, node2)
3872
3853
3873
3854
3874 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3855 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3875 def debugsidedata(ui, repo, file_, rev=None, **opts):
3856 def debugsidedata(ui, repo, file_, rev=None, **opts):
3876 """dump the side data for a cl/manifest/file revision
3857 """dump the side data for a cl/manifest/file revision
3877
3858
3878 Use --verbose to dump the sidedata content."""
3859 Use --verbose to dump the sidedata content."""
3879 opts = pycompat.byteskwargs(opts)
3860 opts = pycompat.byteskwargs(opts)
3880 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3861 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3881 if rev is not None:
3862 if rev is not None:
3882 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3863 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3883 file_, rev = None, file_
3864 file_, rev = None, file_
3884 elif rev is None:
3865 elif rev is None:
3885 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3866 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3886 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3867 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3887 r = getattr(r, '_revlog', r)
3868 r = getattr(r, '_revlog', r)
3888 try:
3869 try:
3889 sidedata = r.sidedata(r.lookup(rev))
3870 sidedata = r.sidedata(r.lookup(rev))
3890 except KeyError:
3871 except KeyError:
3891 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3872 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3892 if sidedata:
3873 if sidedata:
3893 sidedata = list(sidedata.items())
3874 sidedata = list(sidedata.items())
3894 sidedata.sort()
3875 sidedata.sort()
3895 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3876 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3896 for key, value in sidedata:
3877 for key, value in sidedata:
3897 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3878 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3898 if ui.verbose:
3879 if ui.verbose:
3899 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3880 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3900
3881
3901
3882
3902 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3883 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3903 def debugssl(ui, repo, source=None, **opts):
3884 def debugssl(ui, repo, source=None, **opts):
3904 """test a secure connection to a server
3885 """test a secure connection to a server
3905
3886
3906 This builds the certificate chain for the server on Windows, installing the
3887 This builds the certificate chain for the server on Windows, installing the
3907 missing intermediates and trusted root via Windows Update if necessary. It
3888 missing intermediates and trusted root via Windows Update if necessary. It
3908 does nothing on other platforms.
3889 does nothing on other platforms.
3909
3890
3910 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3891 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3911 that server is used. See :hg:`help urls` for more information.
3892 that server is used. See :hg:`help urls` for more information.
3912
3893
3913 If the update succeeds, retry the original operation. Otherwise, the cause
3894 If the update succeeds, retry the original operation. Otherwise, the cause
3914 of the SSL error is likely another issue.
3895 of the SSL error is likely another issue.
3915 """
3896 """
3916 if not pycompat.iswindows:
3897 if not pycompat.iswindows:
3917 raise error.Abort(
3898 raise error.Abort(
3918 _(b'certificate chain building is only possible on Windows')
3899 _(b'certificate chain building is only possible on Windows')
3919 )
3900 )
3920
3901
3921 if not source:
3902 if not source:
3922 if not repo:
3903 if not repo:
3923 raise error.Abort(
3904 raise error.Abort(
3924 _(
3905 _(
3925 b"there is no Mercurial repository here, and no "
3906 b"there is no Mercurial repository here, and no "
3926 b"server specified"
3907 b"server specified"
3927 )
3908 )
3928 )
3909 )
3929 source = b"default"
3910 source = b"default"
3930
3911
3931 source, branches = urlutil.get_unique_pull_path(
3912 source, branches = urlutil.get_unique_pull_path(
3932 b'debugssl', repo, ui, source
3913 b'debugssl', repo, ui, source
3933 )
3914 )
3934 url = urlutil.url(source)
3915 url = urlutil.url(source)
3935
3916
3936 defaultport = {b'https': 443, b'ssh': 22}
3917 defaultport = {b'https': 443, b'ssh': 22}
3937 if url.scheme in defaultport:
3918 if url.scheme in defaultport:
3938 try:
3919 try:
3939 addr = (url.host, int(url.port or defaultport[url.scheme]))
3920 addr = (url.host, int(url.port or defaultport[url.scheme]))
3940 except ValueError:
3921 except ValueError:
3941 raise error.Abort(_(b"malformed port number in URL"))
3922 raise error.Abort(_(b"malformed port number in URL"))
3942 else:
3923 else:
3943 raise error.Abort(_(b"only https and ssh connections are supported"))
3924 raise error.Abort(_(b"only https and ssh connections are supported"))
3944
3925
3945 from . import win32
3926 from . import win32
3946
3927
3947 s = ssl.wrap_socket(
3928 s = ssl.wrap_socket(
3948 socket.socket(),
3929 socket.socket(),
3949 ssl_version=ssl.PROTOCOL_TLS,
3930 ssl_version=ssl.PROTOCOL_TLS,
3950 cert_reqs=ssl.CERT_NONE,
3931 cert_reqs=ssl.CERT_NONE,
3951 ca_certs=None,
3932 ca_certs=None,
3952 )
3933 )
3953
3934
3954 try:
3935 try:
3955 s.connect(addr)
3936 s.connect(addr)
3956 cert = s.getpeercert(True)
3937 cert = s.getpeercert(True)
3957
3938
3958 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3939 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3959
3940
3960 complete = win32.checkcertificatechain(cert, build=False)
3941 complete = win32.checkcertificatechain(cert, build=False)
3961
3942
3962 if not complete:
3943 if not complete:
3963 ui.status(_(b'certificate chain is incomplete, updating... '))
3944 ui.status(_(b'certificate chain is incomplete, updating... '))
3964
3945
3965 if not win32.checkcertificatechain(cert):
3946 if not win32.checkcertificatechain(cert):
3966 ui.status(_(b'failed.\n'))
3947 ui.status(_(b'failed.\n'))
3967 else:
3948 else:
3968 ui.status(_(b'done.\n'))
3949 ui.status(_(b'done.\n'))
3969 else:
3950 else:
3970 ui.status(_(b'full certificate chain is available\n'))
3951 ui.status(_(b'full certificate chain is available\n'))
3971 finally:
3952 finally:
3972 s.close()
3953 s.close()
3973
3954
3974
3955
3975 @command(
3956 @command(
3976 b"debugbackupbundle",
3957 b"debugbackupbundle",
3977 [
3958 [
3978 (
3959 (
3979 b"",
3960 b"",
3980 b"recover",
3961 b"recover",
3981 b"",
3962 b"",
3982 b"brings the specified changeset back into the repository",
3963 b"brings the specified changeset back into the repository",
3983 )
3964 )
3984 ]
3965 ]
3985 + cmdutil.logopts,
3966 + cmdutil.logopts,
3986 _(b"hg debugbackupbundle [--recover HASH]"),
3967 _(b"hg debugbackupbundle [--recover HASH]"),
3987 )
3968 )
3988 def debugbackupbundle(ui, repo, *pats, **opts):
3969 def debugbackupbundle(ui, repo, *pats, **opts):
3989 """lists the changesets available in backup bundles
3970 """lists the changesets available in backup bundles
3990
3971
3991 Without any arguments, this command prints a list of the changesets in each
3972 Without any arguments, this command prints a list of the changesets in each
3992 backup bundle.
3973 backup bundle.
3993
3974
3994 --recover takes a changeset hash and unbundles the first bundle that
3975 --recover takes a changeset hash and unbundles the first bundle that
3995 contains that hash, which puts that changeset back in your repository.
3976 contains that hash, which puts that changeset back in your repository.
3996
3977
3997 --verbose will print the entire commit message and the bundle path for that
3978 --verbose will print the entire commit message and the bundle path for that
3998 backup.
3979 backup.
3999 """
3980 """
4000 backups = list(
3981 backups = list(
4001 filter(
3982 filter(
4002 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3983 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
4003 )
3984 )
4004 )
3985 )
4005 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3986 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
4006
3987
4007 opts = pycompat.byteskwargs(opts)
3988 opts = pycompat.byteskwargs(opts)
4008 opts[b"bundle"] = b""
3989 opts[b"bundle"] = b""
4009 opts[b"force"] = None
3990 opts[b"force"] = None
4010 limit = logcmdutil.getlimit(opts)
3991 limit = logcmdutil.getlimit(opts)
4011
3992
4012 def display(other, chlist, displayer):
3993 def display(other, chlist, displayer):
4013 if opts.get(b"newest_first"):
3994 if opts.get(b"newest_first"):
4014 chlist.reverse()
3995 chlist.reverse()
4015 count = 0
3996 count = 0
4016 for n in chlist:
3997 for n in chlist:
4017 if limit is not None and count >= limit:
3998 if limit is not None and count >= limit:
4018 break
3999 break
4019 parents = [
4000 parents = [
4020 True for p in other.changelog.parents(n) if p != repo.nullid
4001 True for p in other.changelog.parents(n) if p != repo.nullid
4021 ]
4002 ]
4022 if opts.get(b"no_merges") and len(parents) == 2:
4003 if opts.get(b"no_merges") and len(parents) == 2:
4023 continue
4004 continue
4024 count += 1
4005 count += 1
4025 displayer.show(other[n])
4006 displayer.show(other[n])
4026
4007
4027 recovernode = opts.get(b"recover")
4008 recovernode = opts.get(b"recover")
4028 if recovernode:
4009 if recovernode:
4029 if scmutil.isrevsymbol(repo, recovernode):
4010 if scmutil.isrevsymbol(repo, recovernode):
4030 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
4011 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
4031 return
4012 return
4032 elif backups:
4013 elif backups:
4033 msg = _(
4014 msg = _(
4034 b"Recover changesets using: hg debugbackupbundle --recover "
4015 b"Recover changesets using: hg debugbackupbundle --recover "
4035 b"<changeset hash>\n\nAvailable backup changesets:"
4016 b"<changeset hash>\n\nAvailable backup changesets:"
4036 )
4017 )
4037 ui.status(msg, label=b"status.removed")
4018 ui.status(msg, label=b"status.removed")
4038 else:
4019 else:
4039 ui.status(_(b"no backup changesets found\n"))
4020 ui.status(_(b"no backup changesets found\n"))
4040 return
4021 return
4041
4022
4042 for backup in backups:
4023 for backup in backups:
4043 # Much of this is copied from the hg incoming logic
4024 # Much of this is copied from the hg incoming logic
4044 source = os.path.relpath(backup, encoding.getcwd())
4025 source = os.path.relpath(backup, encoding.getcwd())
4045 source, branches = urlutil.get_unique_pull_path(
4026 source, branches = urlutil.get_unique_pull_path(
4046 b'debugbackupbundle',
4027 b'debugbackupbundle',
4047 repo,
4028 repo,
4048 ui,
4029 ui,
4049 source,
4030 source,
4050 default_branches=opts.get(b'branch'),
4031 default_branches=opts.get(b'branch'),
4051 )
4032 )
4052 try:
4033 try:
4053 other = hg.peer(repo, opts, source)
4034 other = hg.peer(repo, opts, source)
4054 except error.LookupError as ex:
4035 except error.LookupError as ex:
4055 msg = _(b"\nwarning: unable to open bundle %s") % source
4036 msg = _(b"\nwarning: unable to open bundle %s") % source
4056 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
4037 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
4057 ui.warn(msg, hint=hint)
4038 ui.warn(msg, hint=hint)
4058 continue
4039 continue
4059 revs, checkout = hg.addbranchrevs(
4040 revs, checkout = hg.addbranchrevs(
4060 repo, other, branches, opts.get(b"rev")
4041 repo, other, branches, opts.get(b"rev")
4061 )
4042 )
4062
4043
4063 if revs:
4044 if revs:
4064 revs = [other.lookup(rev) for rev in revs]
4045 revs = [other.lookup(rev) for rev in revs]
4065
4046
4066 with ui.silent():
4047 with ui.silent():
4067 try:
4048 try:
4068 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4049 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4069 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4050 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4070 )
4051 )
4071 except error.LookupError:
4052 except error.LookupError:
4072 continue
4053 continue
4073
4054
4074 try:
4055 try:
4075 if not chlist:
4056 if not chlist:
4076 continue
4057 continue
4077 if recovernode:
4058 if recovernode:
4078 with repo.lock(), repo.transaction(b"unbundle") as tr:
4059 with repo.lock(), repo.transaction(b"unbundle") as tr:
4079 if scmutil.isrevsymbol(other, recovernode):
4060 if scmutil.isrevsymbol(other, recovernode):
4080 ui.status(_(b"Unbundling %s\n") % (recovernode))
4061 ui.status(_(b"Unbundling %s\n") % (recovernode))
4081 f = hg.openpath(ui, source)
4062 f = hg.openpath(ui, source)
4082 gen = exchange.readbundle(ui, f, source)
4063 gen = exchange.readbundle(ui, f, source)
4083 if isinstance(gen, bundle2.unbundle20):
4064 if isinstance(gen, bundle2.unbundle20):
4084 bundle2.applybundle(
4065 bundle2.applybundle(
4085 repo,
4066 repo,
4086 gen,
4067 gen,
4087 tr,
4068 tr,
4088 source=b"unbundle",
4069 source=b"unbundle",
4089 url=b"bundle:" + source,
4070 url=b"bundle:" + source,
4090 )
4071 )
4091 else:
4072 else:
4092 gen.apply(repo, b"unbundle", b"bundle:" + source)
4073 gen.apply(repo, b"unbundle", b"bundle:" + source)
4093 break
4074 break
4094 else:
4075 else:
4095 backupdate = encoding.strtolocal(
4076 backupdate = encoding.strtolocal(
4096 time.strftime(
4077 time.strftime(
4097 "%a %H:%M, %Y-%m-%d",
4078 "%a %H:%M, %Y-%m-%d",
4098 time.localtime(os.path.getmtime(source)),
4079 time.localtime(os.path.getmtime(source)),
4099 )
4080 )
4100 )
4081 )
4101 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4082 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4102 if ui.verbose:
4083 if ui.verbose:
4103 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4084 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4104 else:
4085 else:
4105 opts[
4086 opts[
4106 b"template"
4087 b"template"
4107 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4088 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4108 displayer = logcmdutil.changesetdisplayer(
4089 displayer = logcmdutil.changesetdisplayer(
4109 ui, other, opts, False
4090 ui, other, opts, False
4110 )
4091 )
4111 display(other, chlist, displayer)
4092 display(other, chlist, displayer)
4112 displayer.close()
4093 displayer.close()
4113 finally:
4094 finally:
4114 cleanupfn()
4095 cleanupfn()
4115
4096
4116
4097
4117 @command(
4098 @command(
4118 b'debugsub',
4099 b'debugsub',
4119 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4100 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4120 _(b'[-r REV] [REV]'),
4101 _(b'[-r REV] [REV]'),
4121 )
4102 )
4122 def debugsub(ui, repo, rev=None):
4103 def debugsub(ui, repo, rev=None):
4123 ctx = scmutil.revsingle(repo, rev, None)
4104 ctx = scmutil.revsingle(repo, rev, None)
4124 for k, v in sorted(ctx.substate.items()):
4105 for k, v in sorted(ctx.substate.items()):
4125 ui.writenoi18n(b'path %s\n' % k)
4106 ui.writenoi18n(b'path %s\n' % k)
4126 ui.writenoi18n(b' source %s\n' % v[0])
4107 ui.writenoi18n(b' source %s\n' % v[0])
4127 ui.writenoi18n(b' revision %s\n' % v[1])
4108 ui.writenoi18n(b' revision %s\n' % v[1])
4128
4109
4129
4110
4130 @command(b'debugshell', optionalrepo=True)
4111 @command(b'debugshell', optionalrepo=True)
4131 def debugshell(ui, repo):
4112 def debugshell(ui, repo):
4132 """run an interactive Python interpreter
4113 """run an interactive Python interpreter
4133
4114
4134 The local namespace is provided with a reference to the ui and
4115 The local namespace is provided with a reference to the ui and
4135 the repo instance (if available).
4116 the repo instance (if available).
4136 """
4117 """
4137 import code
4118 import code
4138
4119
4139 imported_objects = {
4120 imported_objects = {
4140 'ui': ui,
4121 'ui': ui,
4141 'repo': repo,
4122 'repo': repo,
4142 }
4123 }
4143
4124
4144 code.interact(local=imported_objects)
4125 code.interact(local=imported_objects)
4145
4126
4146
4127
4147 @command(
4128 @command(
4148 b'debugsuccessorssets',
4129 b'debugsuccessorssets',
4149 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4130 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4150 _(b'[REV]'),
4131 _(b'[REV]'),
4151 )
4132 )
4152 def debugsuccessorssets(ui, repo, *revs, **opts):
4133 def debugsuccessorssets(ui, repo, *revs, **opts):
4153 """show set of successors for revision
4134 """show set of successors for revision
4154
4135
4155 A successors set of changeset A is a consistent group of revisions that
4136 A successors set of changeset A is a consistent group of revisions that
4156 succeed A. It contains non-obsolete changesets only unless closests
4137 succeed A. It contains non-obsolete changesets only unless closests
4157 successors set is set.
4138 successors set is set.
4158
4139
4159 In most cases a changeset A has a single successors set containing a single
4140 In most cases a changeset A has a single successors set containing a single
4160 successor (changeset A replaced by A').
4141 successor (changeset A replaced by A').
4161
4142
4162 A changeset that is made obsolete with no successors are called "pruned".
4143 A changeset that is made obsolete with no successors are called "pruned".
4163 Such changesets have no successors sets at all.
4144 Such changesets have no successors sets at all.
4164
4145
4165 A changeset that has been "split" will have a successors set containing
4146 A changeset that has been "split" will have a successors set containing
4166 more than one successor.
4147 more than one successor.
4167
4148
4168 A changeset that has been rewritten in multiple different ways is called
4149 A changeset that has been rewritten in multiple different ways is called
4169 "divergent". Such changesets have multiple successor sets (each of which
4150 "divergent". Such changesets have multiple successor sets (each of which
4170 may also be split, i.e. have multiple successors).
4151 may also be split, i.e. have multiple successors).
4171
4152
4172 Results are displayed as follows::
4153 Results are displayed as follows::
4173
4154
4174 <rev1>
4155 <rev1>
4175 <successors-1A>
4156 <successors-1A>
4176 <rev2>
4157 <rev2>
4177 <successors-2A>
4158 <successors-2A>
4178 <successors-2B1> <successors-2B2> <successors-2B3>
4159 <successors-2B1> <successors-2B2> <successors-2B3>
4179
4160
4180 Here rev2 has two possible (i.e. divergent) successors sets. The first
4161 Here rev2 has two possible (i.e. divergent) successors sets. The first
4181 holds one element, whereas the second holds three (i.e. the changeset has
4162 holds one element, whereas the second holds three (i.e. the changeset has
4182 been split).
4163 been split).
4183 """
4164 """
4184 # passed to successorssets caching computation from one call to another
4165 # passed to successorssets caching computation from one call to another
4185 cache = {}
4166 cache = {}
4186 ctx2str = bytes
4167 ctx2str = bytes
4187 node2str = short
4168 node2str = short
4188 for rev in logcmdutil.revrange(repo, revs):
4169 for rev in logcmdutil.revrange(repo, revs):
4189 ctx = repo[rev]
4170 ctx = repo[rev]
4190 ui.write(b'%s\n' % ctx2str(ctx))
4171 ui.write(b'%s\n' % ctx2str(ctx))
4191 for succsset in obsutil.successorssets(
4172 for succsset in obsutil.successorssets(
4192 repo, ctx.node(), closest=opts['closest'], cache=cache
4173 repo, ctx.node(), closest=opts['closest'], cache=cache
4193 ):
4174 ):
4194 if succsset:
4175 if succsset:
4195 ui.write(b' ')
4176 ui.write(b' ')
4196 ui.write(node2str(succsset[0]))
4177 ui.write(node2str(succsset[0]))
4197 for node in succsset[1:]:
4178 for node in succsset[1:]:
4198 ui.write(b' ')
4179 ui.write(b' ')
4199 ui.write(node2str(node))
4180 ui.write(node2str(node))
4200 ui.write(b'\n')
4181 ui.write(b'\n')
4201
4182
4202
4183
4203 @command(b'debugtagscache', [])
4184 @command(b'debugtagscache', [])
4204 def debugtagscache(ui, repo):
4185 def debugtagscache(ui, repo):
4205 """display the contents of .hg/cache/hgtagsfnodes1"""
4186 """display the contents of .hg/cache/hgtagsfnodes1"""
4206 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4187 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4207 flog = repo.file(b'.hgtags')
4188 flog = repo.file(b'.hgtags')
4208 for r in repo:
4189 for r in repo:
4209 node = repo[r].node()
4190 node = repo[r].node()
4210 tagsnode = cache.getfnode(node, computemissing=False)
4191 tagsnode = cache.getfnode(node, computemissing=False)
4211 if tagsnode:
4192 if tagsnode:
4212 tagsnodedisplay = hex(tagsnode)
4193 tagsnodedisplay = hex(tagsnode)
4213 if not flog.hasnode(tagsnode):
4194 if not flog.hasnode(tagsnode):
4214 tagsnodedisplay += b' (unknown node)'
4195 tagsnodedisplay += b' (unknown node)'
4215 elif tagsnode is None:
4196 elif tagsnode is None:
4216 tagsnodedisplay = b'missing'
4197 tagsnodedisplay = b'missing'
4217 else:
4198 else:
4218 tagsnodedisplay = b'invalid'
4199 tagsnodedisplay = b'invalid'
4219
4200
4220 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4201 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4221
4202
4222
4203
4223 @command(
4204 @command(
4224 b'debugtemplate',
4205 b'debugtemplate',
4225 [
4206 [
4226 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4207 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4227 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4208 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4228 ],
4209 ],
4229 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4210 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4230 optionalrepo=True,
4211 optionalrepo=True,
4231 )
4212 )
4232 def debugtemplate(ui, repo, tmpl, **opts):
4213 def debugtemplate(ui, repo, tmpl, **opts):
4233 """parse and apply a template
4214 """parse and apply a template
4234
4215
4235 If -r/--rev is given, the template is processed as a log template and
4216 If -r/--rev is given, the template is processed as a log template and
4236 applied to the given changesets. Otherwise, it is processed as a generic
4217 applied to the given changesets. Otherwise, it is processed as a generic
4237 template.
4218 template.
4238
4219
4239 Use --verbose to print the parsed tree.
4220 Use --verbose to print the parsed tree.
4240 """
4221 """
4241 revs = None
4222 revs = None
4242 if opts['rev']:
4223 if opts['rev']:
4243 if repo is None:
4224 if repo is None:
4244 raise error.RepoError(
4225 raise error.RepoError(
4245 _(b'there is no Mercurial repository here (.hg not found)')
4226 _(b'there is no Mercurial repository here (.hg not found)')
4246 )
4227 )
4247 revs = logcmdutil.revrange(repo, opts['rev'])
4228 revs = logcmdutil.revrange(repo, opts['rev'])
4248
4229
4249 props = {}
4230 props = {}
4250 for d in opts['define']:
4231 for d in opts['define']:
4251 try:
4232 try:
4252 k, v = (e.strip() for e in d.split(b'=', 1))
4233 k, v = (e.strip() for e in d.split(b'=', 1))
4253 if not k or k == b'ui':
4234 if not k or k == b'ui':
4254 raise ValueError
4235 raise ValueError
4255 props[k] = v
4236 props[k] = v
4256 except ValueError:
4237 except ValueError:
4257 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4238 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4258
4239
4259 if ui.verbose:
4240 if ui.verbose:
4260 aliases = ui.configitems(b'templatealias')
4241 aliases = ui.configitems(b'templatealias')
4261 tree = templater.parse(tmpl)
4242 tree = templater.parse(tmpl)
4262 ui.note(templater.prettyformat(tree), b'\n')
4243 ui.note(templater.prettyformat(tree), b'\n')
4263 newtree = templater.expandaliases(tree, aliases)
4244 newtree = templater.expandaliases(tree, aliases)
4264 if newtree != tree:
4245 if newtree != tree:
4265 ui.notenoi18n(
4246 ui.notenoi18n(
4266 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4247 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4267 )
4248 )
4268
4249
4269 if revs is None:
4250 if revs is None:
4270 tres = formatter.templateresources(ui, repo)
4251 tres = formatter.templateresources(ui, repo)
4271 t = formatter.maketemplater(ui, tmpl, resources=tres)
4252 t = formatter.maketemplater(ui, tmpl, resources=tres)
4272 if ui.verbose:
4253 if ui.verbose:
4273 kwds, funcs = t.symbolsuseddefault()
4254 kwds, funcs = t.symbolsuseddefault()
4274 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4255 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4275 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4256 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4276 ui.write(t.renderdefault(props))
4257 ui.write(t.renderdefault(props))
4277 else:
4258 else:
4278 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4259 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4279 if ui.verbose:
4260 if ui.verbose:
4280 kwds, funcs = displayer.t.symbolsuseddefault()
4261 kwds, funcs = displayer.t.symbolsuseddefault()
4281 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4262 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4282 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4263 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4283 for r in revs:
4264 for r in revs:
4284 displayer.show(repo[r], **pycompat.strkwargs(props))
4265 displayer.show(repo[r], **pycompat.strkwargs(props))
4285 displayer.close()
4266 displayer.close()
4286
4267
4287
4268
4288 @command(
4269 @command(
4289 b'debuguigetpass',
4270 b'debuguigetpass',
4290 [
4271 [
4291 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4272 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4292 ],
4273 ],
4293 _(b'[-p TEXT]'),
4274 _(b'[-p TEXT]'),
4294 norepo=True,
4275 norepo=True,
4295 )
4276 )
4296 def debuguigetpass(ui, prompt=b''):
4277 def debuguigetpass(ui, prompt=b''):
4297 """show prompt to type password"""
4278 """show prompt to type password"""
4298 r = ui.getpass(prompt)
4279 r = ui.getpass(prompt)
4299 if r is None:
4280 if r is None:
4300 r = b"<default response>"
4281 r = b"<default response>"
4301 ui.writenoi18n(b'response: %s\n' % r)
4282 ui.writenoi18n(b'response: %s\n' % r)
4302
4283
4303
4284
4304 @command(
4285 @command(
4305 b'debuguiprompt',
4286 b'debuguiprompt',
4306 [
4287 [
4307 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4288 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4308 ],
4289 ],
4309 _(b'[-p TEXT]'),
4290 _(b'[-p TEXT]'),
4310 norepo=True,
4291 norepo=True,
4311 )
4292 )
4312 def debuguiprompt(ui, prompt=b''):
4293 def debuguiprompt(ui, prompt=b''):
4313 """show plain prompt"""
4294 """show plain prompt"""
4314 r = ui.prompt(prompt)
4295 r = ui.prompt(prompt)
4315 ui.writenoi18n(b'response: %s\n' % r)
4296 ui.writenoi18n(b'response: %s\n' % r)
4316
4297
4317
4298
4318 @command(b'debugupdatecaches', [])
4299 @command(b'debugupdatecaches', [])
4319 def debugupdatecaches(ui, repo, *pats, **opts):
4300 def debugupdatecaches(ui, repo, *pats, **opts):
4320 """warm all known caches in the repository"""
4301 """warm all known caches in the repository"""
4321 with repo.wlock(), repo.lock():
4302 with repo.wlock(), repo.lock():
4322 repo.updatecaches(caches=repository.CACHES_ALL)
4303 repo.updatecaches(caches=repository.CACHES_ALL)
4323
4304
4324
4305
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # The upgrade machinery wants the requested optimizations as a set;
    # everything else is forwarded untouched.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=requested,
        backup=backup,
        **opts
    )
4374
4355
4375
4356
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display hook: optionally normalize OS path separators to '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size columns to the longest repo-relative / cwd-relative path so the
    # output lines up. Use generator args to max() (no throwaway lists) and
    # 'fname' instead of the original 'abs', which shadowed builtins.abs.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            display(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4402
4383
4403
4384
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergence is involved, list the divergent nodes (with their
        # phase) before the reason; otherwise the prefix is empty.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            parts = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4421
4402
4422
4403
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Connect to the peer, echo the arguments over the wire protocol twice,
    # and warn when the two round-trips disagree.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the connection-level options; they are not wire arguments.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4453
4434
4454
4435
4455 def _parsewirelangblocks(fh):
4436 def _parsewirelangblocks(fh):
4456 activeaction = None
4437 activeaction = None
4457 blocklines = []
4438 blocklines = []
4458 lastindent = 0
4439 lastindent = 0
4459
4440
4460 for line in fh:
4441 for line in fh:
4461 line = line.rstrip()
4442 line = line.rstrip()
4462 if not line:
4443 if not line:
4463 continue
4444 continue
4464
4445
4465 if line.startswith(b'#'):
4446 if line.startswith(b'#'):
4466 continue
4447 continue
4467
4448
4468 if not line.startswith(b' '):
4449 if not line.startswith(b' '):
4469 # New block. Flush previous one.
4450 # New block. Flush previous one.
4470 if activeaction:
4451 if activeaction:
4471 yield activeaction, blocklines
4452 yield activeaction, blocklines
4472
4453
4473 activeaction = line
4454 activeaction = line
4474 blocklines = []
4455 blocklines = []
4475 lastindent = 0
4456 lastindent = 0
4476 continue
4457 continue
4477
4458
4478 # Else we start with an indent.
4459 # Else we start with an indent.
4479
4460
4480 if not activeaction:
4461 if not activeaction:
4481 raise error.Abort(_(b'indented line outside of block'))
4462 raise error.Abort(_(b'indented line outside of block'))
4482
4463
4483 indent = len(line) - len(line.lstrip())
4464 indent = len(line) - len(line.lstrip())
4484
4465
4485 # If this line is indented more than the last line, concatenate it.
4466 # If this line is indented more than the last line, concatenate it.
4486 if indent > lastindent and blocklines:
4467 if indent > lastindent and blocklines:
4487 blocklines[-1] += line.lstrip()
4468 blocklines[-1] += line.lstrip()
4488 else:
4469 else:
4489 blocklines.append(line)
4470 blocklines.append(line)
4490 lastindent = indent
4471 lastindent = indent
4491
4472
4492 # Flush last block.
4473 # Flush last block.
4493 if activeaction:
4474 if activeaction:
4494 yield activeaction, blocklines
4475 yield activeaction, blocklines
4495
4476
4496
4477
4497 @command(
4478 @command(
4498 b'debugwireproto',
4479 b'debugwireproto',
4499 [
4480 [
4500 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4481 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4501 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4482 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4502 (
4483 (
4503 b'',
4484 b'',
4504 b'noreadstderr',
4485 b'noreadstderr',
4505 False,
4486 False,
4506 _(b'do not read from stderr of the remote'),
4487 _(b'do not read from stderr of the remote'),
4507 ),
4488 ),
4508 (
4489 (
4509 b'',
4490 b'',
4510 b'nologhandshake',
4491 b'nologhandshake',
4511 False,
4492 False,
4512 _(b'do not log I/O related to the peer handshake'),
4493 _(b'do not log I/O related to the peer handshake'),
4513 ),
4494 ),
4514 ]
4495 ]
4515 + cmdutil.remoteopts,
4496 + cmdutil.remoteopts,
4516 _(b'[PATH]'),
4497 _(b'[PATH]'),
4517 optionalrepo=True,
4498 optionalrepo=True,
4518 )
4499 )
4519 def debugwireproto(ui, repo, path=None, **opts):
4500 def debugwireproto(ui, repo, path=None, **opts):
4520 """send wire protocol commands to a server
4501 """send wire protocol commands to a server
4521
4502
4522 This command can be used to issue wire protocol commands to remote
4503 This command can be used to issue wire protocol commands to remote
4523 peers and to debug the raw data being exchanged.
4504 peers and to debug the raw data being exchanged.
4524
4505
4525 ``--localssh`` will start an SSH server against the current repository
4506 ``--localssh`` will start an SSH server against the current repository
4526 and connect to that. By default, the connection will perform a handshake
4507 and connect to that. By default, the connection will perform a handshake
4527 and establish an appropriate peer instance.
4508 and establish an appropriate peer instance.
4528
4509
4529 ``--peer`` can be used to bypass the handshake protocol and construct a
4510 ``--peer`` can be used to bypass the handshake protocol and construct a
4530 peer instance using the specified class type. Valid values are ``raw``,
4511 peer instance using the specified class type. Valid values are ``raw``,
4531 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4512 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4532 don't support higher-level command actions.
4513 don't support higher-level command actions.
4533
4514
4534 ``--noreadstderr`` can be used to disable automatic reading from stderr
4515 ``--noreadstderr`` can be used to disable automatic reading from stderr
4535 of the peer (for SSH connections only). Disabling automatic reading of
4516 of the peer (for SSH connections only). Disabling automatic reading of
4536 stderr is useful for making output more deterministic.
4517 stderr is useful for making output more deterministic.
4537
4518
4538 Commands are issued via a mini language which is specified via stdin.
4519 Commands are issued via a mini language which is specified via stdin.
4539 The language consists of individual actions to perform. An action is
4520 The language consists of individual actions to perform. An action is
4540 defined by a block. A block is defined as a line with no leading
4521 defined by a block. A block is defined as a line with no leading
4541 space followed by 0 or more lines with leading space. Blocks are
4522 space followed by 0 or more lines with leading space. Blocks are
4542 effectively a high-level command with additional metadata.
4523 effectively a high-level command with additional metadata.
4543
4524
4544 Lines beginning with ``#`` are ignored.
4525 Lines beginning with ``#`` are ignored.
4545
4526
4546 The following sections denote available actions.
4527 The following sections denote available actions.
4547
4528
4548 raw
4529 raw
4549 ---
4530 ---
4550
4531
4551 Send raw data to the server.
4532 Send raw data to the server.
4552
4533
4553 The block payload contains the raw data to send as one atomic send
4534 The block payload contains the raw data to send as one atomic send
4554 operation. The data may not actually be delivered in a single system
4535 operation. The data may not actually be delivered in a single system
4555 call: it depends on the abilities of the transport being used.
4536 call: it depends on the abilities of the transport being used.
4556
4537
4557 Each line in the block is de-indented and concatenated. Then, that
4538 Each line in the block is de-indented and concatenated. Then, that
4558 value is evaluated as a Python b'' literal. This allows the use of
4539 value is evaluated as a Python b'' literal. This allows the use of
4559 backslash escaping, etc.
4540 backslash escaping, etc.
4560
4541
4561 raw+
4542 raw+
4562 ----
4543 ----
4563
4544
4564 Behaves like ``raw`` except flushes output afterwards.
4545 Behaves like ``raw`` except flushes output afterwards.
4565
4546
4566 command <X>
4547 command <X>
4567 -----------
4548 -----------
4568
4549
4569 Send a request to run a named command, whose name follows the ``command``
4550 Send a request to run a named command, whose name follows the ``command``
4570 string.
4551 string.
4571
4552
4572 Arguments to the command are defined as lines in this block. The format of
4553 Arguments to the command are defined as lines in this block. The format of
4573 each line is ``<key> <value>``. e.g.::
4554 each line is ``<key> <value>``. e.g.::
4574
4555
4575 command listkeys
4556 command listkeys
4576 namespace bookmarks
4557 namespace bookmarks
4577
4558
4578 If the value begins with ``eval:``, it will be interpreted as a Python
4559 If the value begins with ``eval:``, it will be interpreted as a Python
4579 literal expression. Otherwise values are interpreted as Python b'' literals.
4560 literal expression. Otherwise values are interpreted as Python b'' literals.
4580 This allows sending complex types and encoding special byte sequences via
4561 This allows sending complex types and encoding special byte sequences via
4581 backslash escaping.
4562 backslash escaping.
4582
4563
4583 The following arguments have special meaning:
4564 The following arguments have special meaning:
4584
4565
4585 ``PUSHFILE``
4566 ``PUSHFILE``
4586 When defined, the *push* mechanism of the peer will be used instead
4567 When defined, the *push* mechanism of the peer will be used instead
4587 of the static request-response mechanism and the content of the
4568 of the static request-response mechanism and the content of the
4588 file specified in the value of this argument will be sent as the
4569 file specified in the value of this argument will be sent as the
4589 command payload.
4570 command payload.
4590
4571
4591 This can be used to submit a local bundle file to the remote.
4572 This can be used to submit a local bundle file to the remote.
4592
4573
4593 batchbegin
4574 batchbegin
4594 ----------
4575 ----------
4595
4576
4596 Instruct the peer to begin a batched send.
4577 Instruct the peer to begin a batched send.
4597
4578
4598 All ``command`` blocks are queued for execution until the next
4579 All ``command`` blocks are queued for execution until the next
4599 ``batchsubmit`` block.
4580 ``batchsubmit`` block.
4600
4581
4601 batchsubmit
4582 batchsubmit
4602 -----------
4583 -----------
4603
4584
4604 Submit previously queued ``command`` blocks as a batch request.
4585 Submit previously queued ``command`` blocks as a batch request.
4605
4586
4606 This action MUST be paired with a ``batchbegin`` action.
4587 This action MUST be paired with a ``batchbegin`` action.
4607
4588
4608 httprequest <method> <path>
4589 httprequest <method> <path>
4609 ---------------------------
4590 ---------------------------
4610
4591
4611 (HTTP peer only)
4592 (HTTP peer only)
4612
4593
4613 Send an HTTP request to the peer.
4594 Send an HTTP request to the peer.
4614
4595
4615 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4596 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4616
4597
4617 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4598 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4618 headers to add to the request. e.g. ``Accept: foo``.
4599 headers to add to the request. e.g. ``Accept: foo``.
4619
4600
4620 The following arguments are special:
4601 The following arguments are special:
4621
4602
4622 ``BODYFILE``
4603 ``BODYFILE``
4623 The content of the file defined as the value to this argument will be
4604 The content of the file defined as the value to this argument will be
4624 transferred verbatim as the HTTP request body.
4605 transferred verbatim as the HTTP request body.
4625
4606
4626 ``frame <type> <flags> <payload>``
4607 ``frame <type> <flags> <payload>``
4627 Send a unified protocol frame as part of the request body.
4608 Send a unified protocol frame as part of the request body.
4628
4609
4629 All frames will be collected and sent as the body to the HTTP
4610 All frames will be collected and sent as the body to the HTTP
4630 request.
4611 request.
4631
4612
4632 close
4613 close
4633 -----
4614 -----
4634
4615
4635 Close the connection to the server.
4616 Close the connection to the server.
4636
4617
4637 flush
4618 flush
4638 -----
4619 -----
4639
4620
4640 Flush data written to the server.
4621 Flush data written to the server.
4641
4622
4642 readavailable
4623 readavailable
4643 -------------
4624 -------------
4644
4625
4645 Close the write end of the connection and read all available data from
4626 Close the write end of the connection and read all available data from
4646 the server.
4627 the server.
4647
4628
4648 If the connection to the server encompasses multiple pipes, we poll both
4629 If the connection to the server encompasses multiple pipes, we poll both
4649 pipes and read available data.
4630 pipes and read available data.
4650
4631
4651 readline
4632 readline
4652 --------
4633 --------
4653
4634
4654 Read a line of output from the server. If there are multiple output
4635 Read a line of output from the server. If there are multiple output
4655 pipes, reads only the main pipe.
4636 pipes, reads only the main pipe.
4656
4637
4657 ereadline
4638 ereadline
4658 ---------
4639 ---------
4659
4640
4660 Like ``readline``, but read from the stderr pipe, if available.
4641 Like ``readline``, but read from the stderr pipe, if available.
4661
4642
4662 read <X>
4643 read <X>
4663 --------
4644 --------
4664
4645
4665 ``read()`` N bytes from the server's main output pipe.
4646 ``read()`` N bytes from the server's main output pipe.
4666
4647
4667 eread <X>
4648 eread <X>
4668 ---------
4649 ---------
4669
4650
4670 ``read()`` N bytes from the server's stderr pipe, if available.
4651 ``read()`` N bytes from the server's stderr pipe, if available.
4671
4652
4672 Specifying Unified Frame-Based Protocol Frames
4653 Specifying Unified Frame-Based Protocol Frames
4673 ----------------------------------------------
4654 ----------------------------------------------
4674
4655
4675 It is possible to emit a *Unified Frame-Based Protocol* by using special
4656 It is possible to emit a *Unified Frame-Based Protocol* by using special
4676 syntax.
4657 syntax.
4677
4658
4678 A frame is composed as a type, flags, and payload. These can be parsed
4659 A frame is composed as a type, flags, and payload. These can be parsed
4679 from a string of the form:
4660 from a string of the form:
4680
4661
4681 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4662 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4682
4663
4683 ``request-id`` and ``stream-id`` are integers defining the request and
4664 ``request-id`` and ``stream-id`` are integers defining the request and
4684 stream identifiers.
4665 stream identifiers.
4685
4666
4686 ``type`` can be an integer value for the frame type or the string name
4667 ``type`` can be an integer value for the frame type or the string name
4687 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4668 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4688 ``command-name``.
4669 ``command-name``.
4689
4670
4690 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4671 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4691 components. Each component (and there can be just one) can be an integer
4672 components. Each component (and there can be just one) can be an integer
4692 or a flag name for stream flags or frame flags, respectively. Values are
4673 or a flag name for stream flags or frame flags, respectively. Values are
4693 resolved to integers and then bitwise OR'd together.
4674 resolved to integers and then bitwise OR'd together.
4694
4675
4695 ``payload`` represents the raw frame payload. If it begins with
4676 ``payload`` represents the raw frame payload. If it begins with
4696 ``cbor:``, the following string is evaluated as Python code and the
4677 ``cbor:``, the following string is evaluated as Python code and the
4697 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4678 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4698 as a Python byte string literal.
4679 as a Python byte string literal.
4699 """
4680 """
4700 opts = pycompat.byteskwargs(opts)
4681 opts = pycompat.byteskwargs(opts)
4701
4682
4702 if opts[b'localssh'] and not repo:
4683 if opts[b'localssh'] and not repo:
4703 raise error.Abort(_(b'--localssh requires a repository'))
4684 raise error.Abort(_(b'--localssh requires a repository'))
4704
4685
4705 if opts[b'peer'] and opts[b'peer'] not in (
4686 if opts[b'peer'] and opts[b'peer'] not in (
4706 b'raw',
4687 b'raw',
4707 b'ssh1',
4688 b'ssh1',
4708 ):
4689 ):
4709 raise error.Abort(
4690 raise error.Abort(
4710 _(b'invalid value for --peer'),
4691 _(b'invalid value for --peer'),
4711 hint=_(b'valid values are "raw" and "ssh1"'),
4692 hint=_(b'valid values are "raw" and "ssh1"'),
4712 )
4693 )
4713
4694
4714 if path and opts[b'localssh']:
4695 if path and opts[b'localssh']:
4715 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4696 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4716
4697
4717 if ui.interactive():
4698 if ui.interactive():
4718 ui.write(_(b'(waiting for commands on stdin)\n'))
4699 ui.write(_(b'(waiting for commands on stdin)\n'))
4719
4700
4720 blocks = list(_parsewirelangblocks(ui.fin))
4701 blocks = list(_parsewirelangblocks(ui.fin))
4721
4702
4722 proc = None
4703 proc = None
4723 stdin = None
4704 stdin = None
4724 stdout = None
4705 stdout = None
4725 stderr = None
4706 stderr = None
4726 opener = None
4707 opener = None
4727
4708
4728 if opts[b'localssh']:
4709 if opts[b'localssh']:
4729 # We start the SSH server in its own process so there is process
4710 # We start the SSH server in its own process so there is process
4730 # separation. This prevents a whole class of potential bugs around
4711 # separation. This prevents a whole class of potential bugs around
4731 # shared state from interfering with server operation.
4712 # shared state from interfering with server operation.
4732 args = procutil.hgcmd() + [
4713 args = procutil.hgcmd() + [
4733 b'-R',
4714 b'-R',
4734 repo.root,
4715 repo.root,
4735 b'debugserve',
4716 b'debugserve',
4736 b'--sshstdio',
4717 b'--sshstdio',
4737 ]
4718 ]
4738 proc = subprocess.Popen(
4719 proc = subprocess.Popen(
4739 pycompat.rapply(procutil.tonativestr, args),
4720 pycompat.rapply(procutil.tonativestr, args),
4740 stdin=subprocess.PIPE,
4721 stdin=subprocess.PIPE,
4741 stdout=subprocess.PIPE,
4722 stdout=subprocess.PIPE,
4742 stderr=subprocess.PIPE,
4723 stderr=subprocess.PIPE,
4743 bufsize=0,
4724 bufsize=0,
4744 )
4725 )
4745
4726
4746 stdin = proc.stdin
4727 stdin = proc.stdin
4747 stdout = proc.stdout
4728 stdout = proc.stdout
4748 stderr = proc.stderr
4729 stderr = proc.stderr
4749
4730
4750 # We turn the pipes into observers so we can log I/O.
4731 # We turn the pipes into observers so we can log I/O.
4751 if ui.verbose or opts[b'peer'] == b'raw':
4732 if ui.verbose or opts[b'peer'] == b'raw':
4752 stdin = util.makeloggingfileobject(
4733 stdin = util.makeloggingfileobject(
4753 ui, proc.stdin, b'i', logdata=True
4734 ui, proc.stdin, b'i', logdata=True
4754 )
4735 )
4755 stdout = util.makeloggingfileobject(
4736 stdout = util.makeloggingfileobject(
4756 ui, proc.stdout, b'o', logdata=True
4737 ui, proc.stdout, b'o', logdata=True
4757 )
4738 )
4758 stderr = util.makeloggingfileobject(
4739 stderr = util.makeloggingfileobject(
4759 ui, proc.stderr, b'e', logdata=True
4740 ui, proc.stderr, b'e', logdata=True
4760 )
4741 )
4761
4742
4762 # --localssh also implies the peer connection settings.
4743 # --localssh also implies the peer connection settings.
4763
4744
4764 url = b'ssh://localserver'
4745 url = b'ssh://localserver'
4765 autoreadstderr = not opts[b'noreadstderr']
4746 autoreadstderr = not opts[b'noreadstderr']
4766
4747
4767 if opts[b'peer'] == b'ssh1':
4748 if opts[b'peer'] == b'ssh1':
4768 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4749 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4769 peer = sshpeer.sshv1peer(
4750 peer = sshpeer.sshv1peer(
4770 ui,
4751 ui,
4771 url,
4752 url,
4772 proc,
4753 proc,
4773 stdin,
4754 stdin,
4774 stdout,
4755 stdout,
4775 stderr,
4756 stderr,
4776 None,
4757 None,
4777 autoreadstderr=autoreadstderr,
4758 autoreadstderr=autoreadstderr,
4778 )
4759 )
4779 elif opts[b'peer'] == b'raw':
4760 elif opts[b'peer'] == b'raw':
4780 ui.write(_(b'using raw connection to peer\n'))
4761 ui.write(_(b'using raw connection to peer\n'))
4781 peer = None
4762 peer = None
4782 else:
4763 else:
4783 ui.write(_(b'creating ssh peer from handshake results\n'))
4764 ui.write(_(b'creating ssh peer from handshake results\n'))
4784 peer = sshpeer.makepeer(
4765 peer = sshpeer.makepeer(
4785 ui,
4766 ui,
4786 url,
4767 url,
4787 proc,
4768 proc,
4788 stdin,
4769 stdin,
4789 stdout,
4770 stdout,
4790 stderr,
4771 stderr,
4791 autoreadstderr=autoreadstderr,
4772 autoreadstderr=autoreadstderr,
4792 )
4773 )
4793
4774
4794 elif path:
4775 elif path:
4795 # We bypass hg.peer() so we can proxy the sockets.
4776 # We bypass hg.peer() so we can proxy the sockets.
4796 # TODO consider not doing this because we skip
4777 # TODO consider not doing this because we skip
4797 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4778 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4798 u = urlutil.url(path)
4779 u = urlutil.url(path)
4799 if u.scheme != b'http':
4780 if u.scheme != b'http':
4800 raise error.Abort(_(b'only http:// paths are currently supported'))
4781 raise error.Abort(_(b'only http:// paths are currently supported'))
4801
4782
4802 url, authinfo = u.authinfo()
4783 url, authinfo = u.authinfo()
4803 openerargs = {
4784 openerargs = {
4804 'useragent': b'Mercurial debugwireproto',
4785 'useragent': b'Mercurial debugwireproto',
4805 }
4786 }
4806
4787
4807 # Turn pipes/sockets into observers so we can log I/O.
4788 # Turn pipes/sockets into observers so we can log I/O.
4808 if ui.verbose:
4789 if ui.verbose:
4809 openerargs.update(
4790 openerargs.update(
4810 {
4791 {
4811 'loggingfh': ui,
4792 'loggingfh': ui,
4812 'loggingname': b's',
4793 'loggingname': b's',
4813 'loggingopts': {
4794 'loggingopts': {
4814 'logdata': True,
4795 'logdata': True,
4815 'logdataapis': False,
4796 'logdataapis': False,
4816 },
4797 },
4817 }
4798 }
4818 )
4799 )
4819
4800
4820 if ui.debugflag:
4801 if ui.debugflag:
4821 openerargs['loggingopts']['logdataapis'] = True
4802 openerargs['loggingopts']['logdataapis'] = True
4822
4803
4823 # Don't send default headers when in raw mode. This allows us to
4804 # Don't send default headers when in raw mode. This allows us to
4824 # bypass most of the behavior of our URL handling code so we can
4805 # bypass most of the behavior of our URL handling code so we can
4825 # have near complete control over what's sent on the wire.
4806 # have near complete control over what's sent on the wire.
4826 if opts[b'peer'] == b'raw':
4807 if opts[b'peer'] == b'raw':
4827 openerargs['sendaccept'] = False
4808 openerargs['sendaccept'] = False
4828
4809
4829 opener = urlmod.opener(ui, authinfo, **openerargs)
4810 opener = urlmod.opener(ui, authinfo, **openerargs)
4830
4811
4831 if opts[b'peer'] == b'raw':
4812 if opts[b'peer'] == b'raw':
4832 ui.write(_(b'using raw connection to peer\n'))
4813 ui.write(_(b'using raw connection to peer\n'))
4833 peer = None
4814 peer = None
4834 elif opts[b'peer']:
4815 elif opts[b'peer']:
4835 raise error.Abort(
4816 raise error.Abort(
4836 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4817 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4837 )
4818 )
4838 else:
4819 else:
4839 peer = httppeer.makepeer(ui, path, opener=opener)
4820 peer = httppeer.makepeer(ui, path, opener=opener)
4840
4821
4841 # We /could/ populate stdin/stdout with sock.makefile()...
4822 # We /could/ populate stdin/stdout with sock.makefile()...
4842 else:
4823 else:
4843 raise error.Abort(_(b'unsupported connection configuration'))
4824 raise error.Abort(_(b'unsupported connection configuration'))
4844
4825
4845 batchedcommands = None
4826 batchedcommands = None
4846
4827
4847 # Now perform actions based on the parsed wire language instructions.
4828 # Now perform actions based on the parsed wire language instructions.
4848 for action, lines in blocks:
4829 for action, lines in blocks:
4849 if action in (b'raw', b'raw+'):
4830 if action in (b'raw', b'raw+'):
4850 if not stdin:
4831 if not stdin:
4851 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4832 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4852
4833
4853 # Concatenate the data together.
4834 # Concatenate the data together.
4854 data = b''.join(l.lstrip() for l in lines)
4835 data = b''.join(l.lstrip() for l in lines)
4855 data = stringutil.unescapestr(data)
4836 data = stringutil.unescapestr(data)
4856 stdin.write(data)
4837 stdin.write(data)
4857
4838
4858 if action == b'raw+':
4839 if action == b'raw+':
4859 stdin.flush()
4840 stdin.flush()
4860 elif action == b'flush':
4841 elif action == b'flush':
4861 if not stdin:
4842 if not stdin:
4862 raise error.Abort(_(b'cannot call flush on this peer'))
4843 raise error.Abort(_(b'cannot call flush on this peer'))
4863 stdin.flush()
4844 stdin.flush()
4864 elif action.startswith(b'command'):
4845 elif action.startswith(b'command'):
4865 if not peer:
4846 if not peer:
4866 raise error.Abort(
4847 raise error.Abort(
4867 _(
4848 _(
4868 b'cannot send commands unless peer instance '
4849 b'cannot send commands unless peer instance '
4869 b'is available'
4850 b'is available'
4870 )
4851 )
4871 )
4852 )
4872
4853
4873 command = action.split(b' ', 1)[1]
4854 command = action.split(b' ', 1)[1]
4874
4855
4875 args = {}
4856 args = {}
4876 for line in lines:
4857 for line in lines:
4877 # We need to allow empty values.
4858 # We need to allow empty values.
4878 fields = line.lstrip().split(b' ', 1)
4859 fields = line.lstrip().split(b' ', 1)
4879 if len(fields) == 1:
4860 if len(fields) == 1:
4880 key = fields[0]
4861 key = fields[0]
4881 value = b''
4862 value = b''
4882 else:
4863 else:
4883 key, value = fields
4864 key, value = fields
4884
4865
4885 if value.startswith(b'eval:'):
4866 if value.startswith(b'eval:'):
4886 value = stringutil.evalpythonliteral(value[5:])
4867 value = stringutil.evalpythonliteral(value[5:])
4887 else:
4868 else:
4888 value = stringutil.unescapestr(value)
4869 value = stringutil.unescapestr(value)
4889
4870
4890 args[key] = value
4871 args[key] = value
4891
4872
4892 if batchedcommands is not None:
4873 if batchedcommands is not None:
4893 batchedcommands.append((command, args))
4874 batchedcommands.append((command, args))
4894 continue
4875 continue
4895
4876
4896 ui.status(_(b'sending %s command\n') % command)
4877 ui.status(_(b'sending %s command\n') % command)
4897
4878
4898 if b'PUSHFILE' in args:
4879 if b'PUSHFILE' in args:
4899 with open(args[b'PUSHFILE'], 'rb') as fh:
4880 with open(args[b'PUSHFILE'], 'rb') as fh:
4900 del args[b'PUSHFILE']
4881 del args[b'PUSHFILE']
4901 res, output = peer._callpush(
4882 res, output = peer._callpush(
4902 command, fh, **pycompat.strkwargs(args)
4883 command, fh, **pycompat.strkwargs(args)
4903 )
4884 )
4904 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4885 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4905 ui.status(
4886 ui.status(
4906 _(b'remote output: %s\n') % stringutil.escapestr(output)
4887 _(b'remote output: %s\n') % stringutil.escapestr(output)
4907 )
4888 )
4908 else:
4889 else:
4909 with peer.commandexecutor() as e:
4890 with peer.commandexecutor() as e:
4910 res = e.callcommand(command, args).result()
4891 res = e.callcommand(command, args).result()
4911
4892
4912 ui.status(
4893 ui.status(
4913 _(b'response: %s\n')
4894 _(b'response: %s\n')
4914 % stringutil.pprint(res, bprefix=True, indent=2)
4895 % stringutil.pprint(res, bprefix=True, indent=2)
4915 )
4896 )
4916
4897
4917 elif action == b'batchbegin':
4898 elif action == b'batchbegin':
4918 if batchedcommands is not None:
4899 if batchedcommands is not None:
4919 raise error.Abort(_(b'nested batchbegin not allowed'))
4900 raise error.Abort(_(b'nested batchbegin not allowed'))
4920
4901
4921 batchedcommands = []
4902 batchedcommands = []
4922 elif action == b'batchsubmit':
4903 elif action == b'batchsubmit':
4923 # There is a batching API we could go through. But it would be
4904 # There is a batching API we could go through. But it would be
4924 # difficult to normalize requests into function calls. It is easier
4905 # difficult to normalize requests into function calls. It is easier
4925 # to bypass this layer and normalize to commands + args.
4906 # to bypass this layer and normalize to commands + args.
4926 ui.status(
4907 ui.status(
4927 _(b'sending batch with %d sub-commands\n')
4908 _(b'sending batch with %d sub-commands\n')
4928 % len(batchedcommands)
4909 % len(batchedcommands)
4929 )
4910 )
4930 assert peer is not None
4911 assert peer is not None
4931 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4912 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4932 ui.status(
4913 ui.status(
4933 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4914 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4934 )
4915 )
4935
4916
4936 batchedcommands = None
4917 batchedcommands = None
4937
4918
4938 elif action.startswith(b'httprequest '):
4919 elif action.startswith(b'httprequest '):
4939 if not opener:
4920 if not opener:
4940 raise error.Abort(
4921 raise error.Abort(
4941 _(b'cannot use httprequest without an HTTP peer')
4922 _(b'cannot use httprequest without an HTTP peer')
4942 )
4923 )
4943
4924
4944 request = action.split(b' ', 2)
4925 request = action.split(b' ', 2)
4945 if len(request) != 3:
4926 if len(request) != 3:
4946 raise error.Abort(
4927 raise error.Abort(
4947 _(
4928 _(
4948 b'invalid httprequest: expected format is '
4929 b'invalid httprequest: expected format is '
4949 b'"httprequest <method> <path>'
4930 b'"httprequest <method> <path>'
4950 )
4931 )
4951 )
4932 )
4952
4933
4953 method, httppath = request[1:]
4934 method, httppath = request[1:]
4954 headers = {}
4935 headers = {}
4955 body = None
4936 body = None
4956 frames = []
4937 frames = []
4957 for line in lines:
4938 for line in lines:
4958 line = line.lstrip()
4939 line = line.lstrip()
4959 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4940 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4960 if m:
4941 if m:
4961 # Headers need to use native strings.
4942 # Headers need to use native strings.
4962 key = pycompat.strurl(m.group(1))
4943 key = pycompat.strurl(m.group(1))
4963 value = pycompat.strurl(m.group(2))
4944 value = pycompat.strurl(m.group(2))
4964 headers[key] = value
4945 headers[key] = value
4965 continue
4946 continue
4966
4947
4967 if line.startswith(b'BODYFILE '):
4948 if line.startswith(b'BODYFILE '):
4968 with open(line.split(b' ', 1), b'rb') as fh:
4949 with open(line.split(b' ', 1), b'rb') as fh:
4969 body = fh.read()
4950 body = fh.read()
4970 elif line.startswith(b'frame '):
4951 elif line.startswith(b'frame '):
4971 frame = wireprotoframing.makeframefromhumanstring(
4952 frame = wireprotoframing.makeframefromhumanstring(
4972 line[len(b'frame ') :]
4953 line[len(b'frame ') :]
4973 )
4954 )
4974
4955
4975 frames.append(frame)
4956 frames.append(frame)
4976 else:
4957 else:
4977 raise error.Abort(
4958 raise error.Abort(
4978 _(b'unknown argument to httprequest: %s') % line
4959 _(b'unknown argument to httprequest: %s') % line
4979 )
4960 )
4980
4961
4981 url = path + httppath
4962 url = path + httppath
4982
4963
4983 if frames:
4964 if frames:
4984 body = b''.join(bytes(f) for f in frames)
4965 body = b''.join(bytes(f) for f in frames)
4985
4966
4986 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4967 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4987
4968
4988 # urllib.Request insists on using has_data() as a proxy for
4969 # urllib.Request insists on using has_data() as a proxy for
4989 # determining the request method. Override that to use our
4970 # determining the request method. Override that to use our
4990 # explicitly requested method.
4971 # explicitly requested method.
4991 req.get_method = lambda: pycompat.sysstr(method)
4972 req.get_method = lambda: pycompat.sysstr(method)
4992
4973
4993 try:
4974 try:
4994 res = opener.open(req)
4975 res = opener.open(req)
4995 body = res.read()
4976 body = res.read()
4996 except util.urlerr.urlerror as e:
4977 except util.urlerr.urlerror as e:
4997 # read() method must be called, but only exists in Python 2
4978 # read() method must be called, but only exists in Python 2
4998 getattr(e, 'read', lambda: None)()
4979 getattr(e, 'read', lambda: None)()
4999 continue
4980 continue
5000
4981
5001 ct = res.headers.get('Content-Type')
4982 ct = res.headers.get('Content-Type')
5002 if ct == 'application/mercurial-cbor':
4983 if ct == 'application/mercurial-cbor':
5003 ui.write(
4984 ui.write(
5004 _(b'cbor> %s\n')
4985 _(b'cbor> %s\n')
5005 % stringutil.pprint(
4986 % stringutil.pprint(
5006 cborutil.decodeall(body), bprefix=True, indent=2
4987 cborutil.decodeall(body), bprefix=True, indent=2
5007 )
4988 )
5008 )
4989 )
5009
4990
5010 elif action == b'close':
4991 elif action == b'close':
5011 assert peer is not None
4992 assert peer is not None
5012 peer.close()
4993 peer.close()
5013 elif action == b'readavailable':
4994 elif action == b'readavailable':
5014 if not stdout or not stderr:
4995 if not stdout or not stderr:
5015 raise error.Abort(
4996 raise error.Abort(
5016 _(b'readavailable not available on this peer')
4997 _(b'readavailable not available on this peer')
5017 )
4998 )
5018
4999
5019 stdin.close()
5000 stdin.close()
5020 stdout.read()
5001 stdout.read()
5021 stderr.read()
5002 stderr.read()
5022
5003
5023 elif action == b'readline':
5004 elif action == b'readline':
5024 if not stdout:
5005 if not stdout:
5025 raise error.Abort(_(b'readline not available on this peer'))
5006 raise error.Abort(_(b'readline not available on this peer'))
5026 stdout.readline()
5007 stdout.readline()
5027 elif action == b'ereadline':
5008 elif action == b'ereadline':
5028 if not stderr:
5009 if not stderr:
5029 raise error.Abort(_(b'ereadline not available on this peer'))
5010 raise error.Abort(_(b'ereadline not available on this peer'))
5030 stderr.readline()
5011 stderr.readline()
5031 elif action.startswith(b'read '):
5012 elif action.startswith(b'read '):
5032 count = int(action.split(b' ', 1)[1])
5013 count = int(action.split(b' ', 1)[1])
5033 if not stdout:
5014 if not stdout:
5034 raise error.Abort(_(b'read not available on this peer'))
5015 raise error.Abort(_(b'read not available on this peer'))
5035 stdout.read(count)
5016 stdout.read(count)
5036 elif action.startswith(b'eread '):
5017 elif action.startswith(b'eread '):
5037 count = int(action.split(b' ', 1)[1])
5018 count = int(action.split(b' ', 1)[1])
5038 if not stderr:
5019 if not stderr:
5039 raise error.Abort(_(b'eread not available on this peer'))
5020 raise error.Abort(_(b'eread not available on this peer'))
5040 stderr.read(count)
5021 stderr.read(count)
5041 else:
5022 else:
5042 raise error.Abort(_(b'unknown action: %s') % action)
5023 raise error.Abort(_(b'unknown action: %s') % action)
5043
5024
5044 if batchedcommands is not None:
5025 if batchedcommands is not None:
5045 raise error.Abort(_(b'unclosed "batchbegin" request'))
5026 raise error.Abort(_(b'unclosed "batchbegin" request'))
5046
5027
5047 if peer:
5028 if peer:
5048 peer.close()
5029 peer.close()
5049
5030
5050 if proc:
5031 if proc:
5051 proc.kill()
5032 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now