##// END OF EJS Templates
updatecaches: use the caches argument in `hg debugupdatecaches`...
marmoute -
r48078:e96f7585 default
parent child Browse files
Show More
@@ -1,4828 +1,4829
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 revlog,
73 revlog,
74 revset,
74 revset,
75 revsetlang,
75 revsetlang,
76 scmutil,
76 scmutil,
77 setdiscovery,
77 setdiscovery,
78 simplemerge,
78 simplemerge,
79 sshpeer,
79 sshpeer,
80 sslutil,
80 sslutil,
81 streamclone,
81 streamclone,
82 strip,
82 strip,
83 tags as tagsmod,
83 tags as tagsmod,
84 templater,
84 templater,
85 treediscovery,
85 treediscovery,
86 upgrade,
86 upgrade,
87 url as urlmod,
87 url as urlmod,
88 util,
88 util,
89 vfs as vfsmod,
89 vfs as vfsmod,
90 wireprotoframing,
90 wireprotoframing,
91 wireprotoserver,
91 wireprotoserver,
92 wireprotov2peer,
92 wireprotov2peer,
93 )
93 )
94 from .interfaces import repository
94 from .utils import (
95 from .utils import (
95 cborutil,
96 cborutil,
96 compression,
97 compression,
97 dateutil,
98 dateutil,
98 procutil,
99 procutil,
99 stringutil,
100 stringutil,
100 urlutil,
101 urlutil,
101 )
102 )
102
103
103 from .revlogutils import (
104 from .revlogutils import (
104 deltas as deltautil,
105 deltas as deltautil,
105 nodemap,
106 nodemap,
106 sidedata,
107 sidedata,
107 )
108 )
108
109
# Convenience alias: allow debug commands to release locks without going
# through the lock module each time.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the commands
# registered by the strip extension so that those are exposed alongside the
# commands defined in this module.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog,
        # without going through a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Fix: use a bytes path.  vfs paths are byte strings everywhere else in
    # this module (see e.g. repo.vfs.write(b"localtags", ...)); the str
    # literal here was inconsistent with that convention.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the (possibly remote) bundle path, parse it, and apply it to the
    # current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo: node ids are assigned positionally.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the most recently committed node
        atbranch = b'default'
        nodeids = []  # node id per rev, indexed by rev number
        id = 0
        progress.update(id)
        # Second parse pass: actually create the commits.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # mark this rev's slice of lines so later merges conflict
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # merges carry forward the other parent's nf* files
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of a changegroup.

    With ``all`` set, prints every delta of the changelog, manifest, and each
    filelog; otherwise only the changelog node ids are listed.  ``indent``
    prefixes every output line (used when nested inside bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header plus one line per delta in the
            # current sub-stream of the changegroup.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # A changegroup is consumed in order: changelog, manifests, then one
        # sub-stream per file until an empty header ({}) is returned.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown binary format: report the version byte and payload size.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, marker)
        fm.end()
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    # One line per head, grouped by phase in the canonical phase order.
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
def _quasirepr(thing):
    """Return a repr()-like bytes rendering with deterministic dict order."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, recurse into the payload (suppressed in
        # quiet mode).
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only report the bundlespec; do not inspect the contents.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # Dispatch on the bundle format detected by readbundle().
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        # Bundle2 capabilities are nested one level deeper: a key may carry
        # a list of supported values.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the file-change information from the manifests.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed information from the changelog sidedata,
        # if this revision has a files block stored.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; the order of checks matters since
            # the categories are checked from most to least specific.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    nerrors = 0
    # Pass 1: every file tracked by the dirstate must be consistent with the
    # parent manifests for its recorded state.
    for fname in repo.dirstate:
        st = repo.dirstate[fname]
        if st in b"nr" and fname not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (fname, st))
            nerrors += 1
        if st in b"a" and fname in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (fname, st))
            nerrors += 1
        if st in b"m" and fname not in m1 and fname not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (fname, st)
            )
            nerrors += 1
    # Pass 2: every file in the first parent manifest must be tracked.
    for fname in m1:
        st = repo.dirstate[fname]
        if st not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (fname, st))
            nerrors += 1
    if nerrors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
def _debugdisplaycolor(ui):
    """Print every known color label, each rendered in its own color."""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
def _debugdisplaystyle(ui):
    """List every configured style label with its effects, column-aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        # Nothing configured: only the header is printed.
        return
    # Pad every label out to the longest one so the effect column lines up.
    column = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        # Render the label in its own style so the output demonstrates it.
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            padding = max(0, column - len(name))
            ui.write(b' ' * padding)
            rendered = [ui.label(effect, effect) for effect in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617
618
618
619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles archive revlogs wholesale, so phase boundaries
        # cannot be honored: warn rather than silently leaking secret
        # changesets into the bundle.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks straight to fname.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # Report which repository requirements a consumer of this bundle needs.
    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640
641
641
642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # FILE form: read the DAG from a standalone revlog index on disk
        # (audit=False: the path is user-supplied, outside any repo).
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit a 'n'(ode) event per revision; parent list drops the
            # null parent (-1). Explicitly listed revs also get an
            # 'l'(abel) event naming them rN.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # Repo form: walk the changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Pre-index tag names by the revision they point at.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an 'a'(nnotation) event is only
            # emitted when the branch actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    # extra[b'branch'] is field 5 of the changelog entry.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Serialize the event stream into dagtext lines and echo them.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
711
712
712
713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the storage target is implied by the flag, so
        # the single positional argument must be the revision, not a file.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        # The explicit FILE form requires a revision as well.
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata(): the stored bytes as-is, without revision-flag processing.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728
729
729
730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # -e/--extended: also accept the extra date formats from dateutil.
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is a (unixtime, tz-offset) pair, printed raw then human-readable.
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        # With RANGE, also report whether DATE matches that date range.
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
748
749
749
750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    # Bind revlog accessors to locals: these are hit once per revision.
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision's delta: sizes, how the delta base was
        # chosen, and the full chain down to the base.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parents.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is either self (full text)
            # or implicitly the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): the column padding inside these header strings appears
    # collapsed in this rendering — verify spacing against upstream.
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chainbases numbers each distinct chain base in order of appearance.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        # Bytes inside that span that belong to *other* chains.
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this rev is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            # Guard division by zero for empty revisions.
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be pulled from disk, and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930
931
931
932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but still honored; it forces dates off even
    # when --dates was given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # Each dirstate entry is (state, mode, size, mtime).
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            # NOTE(review): trailing padding in these literals appears
            # collapsed in this rendering — verify widths against upstream.
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # Symlink bit set: show 'lnk' instead of a permission triplet.
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # Copy sources are tracked separately from the entries themselves.
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975
976
976
977
977 @command(
978 @command(
978 b'debugdiscovery',
979 b'debugdiscovery',
979 [
980 [
980 (b'', b'old', None, _(b'use old-style discovery')),
981 (b'', b'old', None, _(b'use old-style discovery')),
981 (
982 (
982 b'',
983 b'',
983 b'nonheads',
984 b'nonheads',
984 None,
985 None,
985 _(b'use old-style discovery with non-heads included'),
986 _(b'use old-style discovery with non-heads included'),
986 ),
987 ),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 (
990 (
990 b'',
991 b'',
991 b'local-as-revs',
992 b'local-as-revs',
992 b"",
993 b"",
993 b'treat local has having these revisions only',
994 b'treat local has having these revisions only',
994 ),
995 ),
995 (
996 (
996 b'',
997 b'',
997 b'remote-as-revs',
998 b'remote-as-revs',
998 b"",
999 b"",
999 b'use local as remote, with only these these revisions',
1000 b'use local as remote, with only these these revisions',
1000 ),
1001 ),
1001 ]
1002 ]
1002 + cmdutil.remoteopts
1003 + cmdutil.remoteopts
1003 + cmdutil.formatteropts,
1004 + cmdutil.formatteropts,
1004 _(b'[--rev REV] [OTHER]'),
1005 _(b'[--rev REV] [OTHER]'),
1005 )
1006 )
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 """runs the changeset discovery protocol in isolation
1008 """runs the changeset discovery protocol in isolation
1008
1009
1009 The local peer can be "replaced" by a subset of the local repository by
1010 The local peer can be "replaced" by a subset of the local repository by
1010 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1011 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1011 be "replaced" by a subset of the local repository using the
1012 be "replaced" by a subset of the local repository using the
1012 `--local-as-revs` flag. This is useful to efficiently debug pathological
1013 `--local-as-revs` flag. This is useful to efficiently debug pathological
1013 discovery situation.
1014 discovery situation.
1014
1015
1015 The following developer oriented config are relevant for people playing with this command:
1016 The following developer oriented config are relevant for people playing with this command:
1016
1017
1017 * devel.discovery.exchange-heads=True
1018 * devel.discovery.exchange-heads=True
1018
1019
1019 If False, the discovery will not start with
1020 If False, the discovery will not start with
1020 remote head fetching and local head querying.
1021 remote head fetching and local head querying.
1021
1022
1022 * devel.discovery.grow-sample=True
1023 * devel.discovery.grow-sample=True
1023
1024
1024 If False, the sample size used in set discovery will not be increased
1025 If False, the sample size used in set discovery will not be increased
1025 through the process
1026 through the process
1026
1027
1027 * devel.discovery.grow-sample.dynamic=True
1028 * devel.discovery.grow-sample.dynamic=True
1028
1029
1029 When discovery.grow-sample.dynamic is True, the default, the sample size is
1030 When discovery.grow-sample.dynamic is True, the default, the sample size is
1030 adapted to the shape of the undecided set (it is set to the max of:
1031 adapted to the shape of the undecided set (it is set to the max of:
1031 <target-size>, len(roots(undecided)), len(heads(undecided)
1032 <target-size>, len(roots(undecided)), len(heads(undecided)
1032
1033
1033 * devel.discovery.grow-sample.rate=1.05
1034 * devel.discovery.grow-sample.rate=1.05
1034
1035
1035 the rate at which the sample grow
1036 the rate at which the sample grow
1036
1037
1037 * devel.discovery.randomize=True
1038 * devel.discovery.randomize=True
1038
1039
1039 If andom sampling during discovery are deterministic. It is meant for
1040 If andom sampling during discovery are deterministic. It is meant for
1040 integration tests.
1041 integration tests.
1041
1042
1042 * devel.discovery.sample-size=200
1043 * devel.discovery.sample-size=200
1043
1044
1044 Control the initial size of the discovery sample
1045 Control the initial size of the discovery sample
1045
1046
1046 * devel.discovery.sample-size.initial=100
1047 * devel.discovery.sample-size.initial=100
1047
1048
1048 Control the initial size of the discovery for initial change
1049 Control the initial size of the discovery for initial change
1049 """
1050 """
1050 opts = pycompat.byteskwargs(opts)
1051 opts = pycompat.byteskwargs(opts)
1051 unfi = repo.unfiltered()
1052 unfi = repo.unfiltered()
1052
1053
1053 # setup potential extra filtering
1054 # setup potential extra filtering
1054 local_revs = opts[b"local_as_revs"]
1055 local_revs = opts[b"local_as_revs"]
1055 remote_revs = opts[b"remote_as_revs"]
1056 remote_revs = opts[b"remote_as_revs"]
1056
1057
1057 # make sure tests are repeatable
1058 # make sure tests are repeatable
1058 random.seed(int(opts[b'seed']))
1059 random.seed(int(opts[b'seed']))
1059
1060
1060 if not remote_revs:
1061 if not remote_revs:
1061
1062
1062 remoteurl, branches = urlutil.get_unique_pull_path(
1063 remoteurl, branches = urlutil.get_unique_pull_path(
1063 b'debugdiscovery', repo, ui, remoteurl
1064 b'debugdiscovery', repo, ui, remoteurl
1064 )
1065 )
1065 remote = hg.peer(repo, opts, remoteurl)
1066 remote = hg.peer(repo, opts, remoteurl)
1066 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1067 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1067 else:
1068 else:
1068 branches = (None, [])
1069 branches = (None, [])
1069 remote_filtered_revs = scmutil.revrange(
1070 remote_filtered_revs = scmutil.revrange(
1070 unfi, [b"not (::(%s))" % remote_revs]
1071 unfi, [b"not (::(%s))" % remote_revs]
1071 )
1072 )
1072 remote_filtered_revs = frozenset(remote_filtered_revs)
1073 remote_filtered_revs = frozenset(remote_filtered_revs)
1073
1074
1074 def remote_func(x):
1075 def remote_func(x):
1075 return remote_filtered_revs
1076 return remote_filtered_revs
1076
1077
1077 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1078 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1078
1079
1079 remote = repo.peer()
1080 remote = repo.peer()
1080 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1081 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1081
1082
1082 if local_revs:
1083 if local_revs:
1083 local_filtered_revs = scmutil.revrange(
1084 local_filtered_revs = scmutil.revrange(
1084 unfi, [b"not (::(%s))" % local_revs]
1085 unfi, [b"not (::(%s))" % local_revs]
1085 )
1086 )
1086 local_filtered_revs = frozenset(local_filtered_revs)
1087 local_filtered_revs = frozenset(local_filtered_revs)
1087
1088
1088 def local_func(x):
1089 def local_func(x):
1089 return local_filtered_revs
1090 return local_filtered_revs
1090
1091
1091 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1092 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1092 repo = repo.filtered(b'debug-discovery-local-filter')
1093 repo = repo.filtered(b'debug-discovery-local-filter')
1093
1094
1094 data = {}
1095 data = {}
1095 if opts.get(b'old'):
1096 if opts.get(b'old'):
1096
1097
1097 def doit(pushedrevs, remoteheads, remote=remote):
1098 def doit(pushedrevs, remoteheads, remote=remote):
1098 if not util.safehasattr(remote, b'branches'):
1099 if not util.safehasattr(remote, b'branches'):
1099 # enable in-client legacy support
1100 # enable in-client legacy support
1100 remote = localrepo.locallegacypeer(remote.local())
1101 remote = localrepo.locallegacypeer(remote.local())
1101 common, _in, hds = treediscovery.findcommonincoming(
1102 common, _in, hds = treediscovery.findcommonincoming(
1102 repo, remote, force=True, audit=data
1103 repo, remote, force=True, audit=data
1103 )
1104 )
1104 common = set(common)
1105 common = set(common)
1105 if not opts.get(b'nonheads'):
1106 if not opts.get(b'nonheads'):
1106 ui.writenoi18n(
1107 ui.writenoi18n(
1107 b"unpruned common: %s\n"
1108 b"unpruned common: %s\n"
1108 % b" ".join(sorted(short(n) for n in common))
1109 % b" ".join(sorted(short(n) for n in common))
1109 )
1110 )
1110
1111
1111 clnode = repo.changelog.node
1112 clnode = repo.changelog.node
1112 common = repo.revs(b'heads(::%ln)', common)
1113 common = repo.revs(b'heads(::%ln)', common)
1113 common = {clnode(r) for r in common}
1114 common = {clnode(r) for r in common}
1114 return common, hds
1115 return common, hds
1115
1116
1116 else:
1117 else:
1117
1118
1118 def doit(pushedrevs, remoteheads, remote=remote):
1119 def doit(pushedrevs, remoteheads, remote=remote):
1119 nodes = None
1120 nodes = None
1120 if pushedrevs:
1121 if pushedrevs:
1121 revs = scmutil.revrange(repo, pushedrevs)
1122 revs = scmutil.revrange(repo, pushedrevs)
1122 nodes = [repo[r].node() for r in revs]
1123 nodes = [repo[r].node() for r in revs]
1123 common, any, hds = setdiscovery.findcommonheads(
1124 common, any, hds = setdiscovery.findcommonheads(
1124 ui, repo, remote, ancestorsof=nodes, audit=data
1125 ui, repo, remote, ancestorsof=nodes, audit=data
1125 )
1126 )
1126 return common, hds
1127 return common, hds
1127
1128
1128 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1129 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1129 localrevs = opts[b'rev']
1130 localrevs = opts[b'rev']
1130
1131
1131 fm = ui.formatter(b'debugdiscovery', opts)
1132 fm = ui.formatter(b'debugdiscovery', opts)
1132 if fm.strict_format:
1133 if fm.strict_format:
1133
1134
1134 @contextlib.contextmanager
1135 @contextlib.contextmanager
1135 def may_capture_output():
1136 def may_capture_output():
1136 ui.pushbuffer()
1137 ui.pushbuffer()
1137 yield
1138 yield
1138 data[b'output'] = ui.popbuffer()
1139 data[b'output'] = ui.popbuffer()
1139
1140
1140 else:
1141 else:
1141 may_capture_output = util.nullcontextmanager
1142 may_capture_output = util.nullcontextmanager
1142 with may_capture_output():
1143 with may_capture_output():
1143 with util.timedcm('debug-discovery') as t:
1144 with util.timedcm('debug-discovery') as t:
1144 common, hds = doit(localrevs, remoterevs)
1145 common, hds = doit(localrevs, remoterevs)
1145
1146
1146 # compute all statistics
1147 # compute all statistics
1147 heads_common = set(common)
1148 heads_common = set(common)
1148 heads_remote = set(hds)
1149 heads_remote = set(hds)
1149 heads_local = set(repo.heads())
1150 heads_local = set(repo.heads())
1150 # note: they cannot be a local or remote head that is in common and not
1151 # note: they cannot be a local or remote head that is in common and not
1151 # itself a head of common.
1152 # itself a head of common.
1152 heads_common_local = heads_common & heads_local
1153 heads_common_local = heads_common & heads_local
1153 heads_common_remote = heads_common & heads_remote
1154 heads_common_remote = heads_common & heads_remote
1154 heads_common_both = heads_common & heads_remote & heads_local
1155 heads_common_both = heads_common & heads_remote & heads_local
1155
1156
1156 all = repo.revs(b'all()')
1157 all = repo.revs(b'all()')
1157 common = repo.revs(b'::%ln', common)
1158 common = repo.revs(b'::%ln', common)
1158 roots_common = repo.revs(b'roots(::%ld)', common)
1159 roots_common = repo.revs(b'roots(::%ld)', common)
1159 missing = repo.revs(b'not ::%ld', common)
1160 missing = repo.revs(b'not ::%ld', common)
1160 heads_missing = repo.revs(b'heads(%ld)', missing)
1161 heads_missing = repo.revs(b'heads(%ld)', missing)
1161 roots_missing = repo.revs(b'roots(%ld)', missing)
1162 roots_missing = repo.revs(b'roots(%ld)', missing)
1162 assert len(common) + len(missing) == len(all)
1163 assert len(common) + len(missing) == len(all)
1163
1164
1164 initial_undecided = repo.revs(
1165 initial_undecided = repo.revs(
1165 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1166 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1166 )
1167 )
1167 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1168 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1168 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1169 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1169 common_initial_undecided = initial_undecided & common
1170 common_initial_undecided = initial_undecided & common
1170 missing_initial_undecided = initial_undecided & missing
1171 missing_initial_undecided = initial_undecided & missing
1171
1172
1172 data[b'elapsed'] = t.elapsed
1173 data[b'elapsed'] = t.elapsed
1173 data[b'nb-common-heads'] = len(heads_common)
1174 data[b'nb-common-heads'] = len(heads_common)
1174 data[b'nb-common-heads-local'] = len(heads_common_local)
1175 data[b'nb-common-heads-local'] = len(heads_common_local)
1175 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1176 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1176 data[b'nb-common-heads-both'] = len(heads_common_both)
1177 data[b'nb-common-heads-both'] = len(heads_common_both)
1177 data[b'nb-common-roots'] = len(roots_common)
1178 data[b'nb-common-roots'] = len(roots_common)
1178 data[b'nb-head-local'] = len(heads_local)
1179 data[b'nb-head-local'] = len(heads_local)
1179 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1180 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1180 data[b'nb-head-remote'] = len(heads_remote)
1181 data[b'nb-head-remote'] = len(heads_remote)
1181 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1182 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1182 heads_common_remote
1183 heads_common_remote
1183 )
1184 )
1184 data[b'nb-revs'] = len(all)
1185 data[b'nb-revs'] = len(all)
1185 data[b'nb-revs-common'] = len(common)
1186 data[b'nb-revs-common'] = len(common)
1186 data[b'nb-revs-missing'] = len(missing)
1187 data[b'nb-revs-missing'] = len(missing)
1187 data[b'nb-missing-heads'] = len(heads_missing)
1188 data[b'nb-missing-heads'] = len(heads_missing)
1188 data[b'nb-missing-roots'] = len(roots_missing)
1189 data[b'nb-missing-roots'] = len(roots_missing)
1189 data[b'nb-ini_und'] = len(initial_undecided)
1190 data[b'nb-ini_und'] = len(initial_undecided)
1190 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1191 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1191 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1192 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1192 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1193 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1193 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1194 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1194
1195
1195 fm.startitem()
1196 fm.startitem()
1196 fm.data(**pycompat.strkwargs(data))
1197 fm.data(**pycompat.strkwargs(data))
1197 # display discovery summary
1198 # display discovery summary
1198 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1199 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1199 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1200 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1200 fm.plain(b"heads summary:\n")
1201 fm.plain(b"heads summary:\n")
1201 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1202 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1202 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1203 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1203 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1204 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1204 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1205 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1205 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1206 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1206 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1207 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1207 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1208 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1208 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1209 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1209 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1210 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1210 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1211 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1211 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1212 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1212 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1213 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1213 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1214 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1214 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1215 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1215 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1216 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1216 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1217 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1217 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1218 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1218 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1219 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1219 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1220 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1220 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1221 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1221 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1222 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1222 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1223 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1223
1224
1224 if ui.verbose:
1225 if ui.verbose:
1225 fm.plain(
1226 fm.plain(
1226 b"common heads: %s\n"
1227 b"common heads: %s\n"
1227 % b" ".join(sorted(short(n) for n in heads_common))
1228 % b" ".join(sorted(short(n) for n in heads_common))
1228 )
1229 )
1229 fm.end()
1230 fm.end()
1230
1231
1231
1232
# Buffer size (4 KiB) used by debugdownload when streaming the resource
# to its destination; bounds per-read memory usage.
_chunksize = 4 << 10
1234
1235
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at URL is fetched through Mercurial's url handling
    (honoring proxy/auth configuration) and streamed in ``_chunksize``
    pieces either to the ui or, with --output, to a file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so arbitrarily large resources do
        # not get buffered in memory
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        try:
            # fix: the source handle was previously leaked on every call;
            # always release it, even if writing to ``dest`` failed
            fh.close()
        finally:
            if output:
                dest.close()
1257
1258
1258
1259
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions in stable (name) order so output is deterministic
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no __file__; report the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: bare name on its own line; details (if any)
            # follow via the condwrite calls below
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a compatibility hint
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # tested, but not with this hg version: show the newest
                # version the extension declares support for
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1320
1321
1321
1322
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # imported lazily to avoid a module-level import cycle
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # translation pipeline: each stage transforms the tree produced by the
    # previous one; names are what --show-stage accepts
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate requested stage names before doing any work
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare "parsed" header is suppressed in the legacy
            # --verbose mode for backward-compatible output
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working-directory files (known, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # --show-matcher defaults to None: explicit flag wins, else --verbose
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1417
1418
1418
1419
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; strings pass through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, template...) keep native types
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default can be
        # color-highlighted by the ui
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1489
1490
1490
1491
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a probe result as a yes/no byte string
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))

    # probing case sensitivity needs a scratch file in the target directory;
    # if that is not possible (permissions, read-only fs), report unknown
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1513
1514
1514
1515
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # translate the hex node id arguments into binary nodes for the call
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name onto an internal bundle type
    name_to_type = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = byteopts.get(b'type', b'bzip2').lower()
    bundletype = name_to_type.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1561
1562
1562
1563
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise it may be ignored via one of its parent
                    # directories; report the first (innermost-to-outermost
                    # per finddirs order) that matches
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # point the user at the exact rule that matched
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1611
1612
1612
1613
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # --debug shows full 40-char hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # probe the first entry to learn the rendered node id width so the
    # header columns line up; falls back to 12 for an empty store
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1652
1653
1653
1654
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file

    Emits one "parent -> child" edge per parent of each revision; the second
    parent edge is only written when it is not the null node.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1672
1673
1673
1674
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index

    Only the native (C/Rust) index implementation exposes a stats() method;
    abort with a clear message on the pure-Python fallback.
    """
    # shortest() forces the index to be fully loaded/populated first
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1683
1684
1684
1685
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks encoding, the Python interpreter, TLS/SNI support, compiled
    extension modules, compression engines, templates, the commit editor and
    the configured username, counting problems as it goes.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no os.__file__; report the binary
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # extensions may contribute their own checks via a 'debuginstall' hook
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1983
1984
1984
1985
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # one '0'/'1' character per queried id, in input order
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1998
1999
1999
2000
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept only so ancient completion scripts keep working
    debugnamecomplete(ui, repo, *args)
2004
2005
2005
2006
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forcibly remove lock files, then stop: nothing else to report
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # hold the lock(s) until the user answers the prompt
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a missing lock file just means the lock is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2117
2118
2118
2119
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # not every revlog implementation carries a fulltext cache
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # no action flags: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2192
2193
2193
2194
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering of the merge state
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged, with their optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the record layout depends on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2301
2302
2302
2303
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in pycompat.iteritems(repo.names):
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        # no prefix given: complete against everything
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2325
2326
2326
2327
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # dump the raw bytes currently persisted on disk
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2388
2389
2389
2390
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # recording parents only makes sense for changesets we
                    # actually know about
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2539
2540
2540
2541
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2553
2554
2554
2555
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE: this function used to be (mis)named debugp1copies, shadowing the
    # real debugp1copies defined above at module level; the CLI command name
    # (registered via the byte string above) is unaffected by the rename.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2567
2568
2568
2569
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # return (files, dirs) matching the given prefix whose dirstate
        # status character is in `acceptable`
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate on platforms where the
        # OS separator differs
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # no status filter given means "accept everything"
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2637
2638
2638
2639
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2652
2653
2653
2654
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # always release the connection, even if a query above failed
        peer.close()
2677
2678
2678
2679
2679 @command(
2680 @command(
2680 b'debugpickmergetool',
2681 b'debugpickmergetool',
2681 [
2682 [
2682 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2683 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2683 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2684 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2684 ]
2685 ]
2685 + cmdutil.walkopts
2686 + cmdutil.walkopts
2686 + cmdutil.mergetoolopts,
2687 + cmdutil.mergetoolopts,
2687 _(b'[PATTERN]...'),
2688 _(b'[PATTERN]...'),
2688 inferrepo=True,
2689 inferrepo=True,
2689 )
2690 )
2690 def debugpickmergetool(ui, repo, *pats, **opts):
2691 def debugpickmergetool(ui, repo, *pats, **opts):
2691 """examine which merge tool is chosen for specified file
2692 """examine which merge tool is chosen for specified file
2692
2693
2693 As described in :hg:`help merge-tools`, Mercurial examines
2694 As described in :hg:`help merge-tools`, Mercurial examines
2694 configurations below in this order to decide which merge tool is
2695 configurations below in this order to decide which merge tool is
2695 chosen for specified file.
2696 chosen for specified file.
2696
2697
2697 1. ``--tool`` option
2698 1. ``--tool`` option
2698 2. ``HGMERGE`` environment variable
2699 2. ``HGMERGE`` environment variable
2699 3. configurations in ``merge-patterns`` section
2700 3. configurations in ``merge-patterns`` section
2700 4. configuration of ``ui.merge``
2701 4. configuration of ``ui.merge``
2701 5. configurations in ``merge-tools`` section
2702 5. configurations in ``merge-tools`` section
2702 6. ``hgmerge`` tool (for historical reason only)
2703 6. ``hgmerge`` tool (for historical reason only)
2703 7. default tool for fallback (``:merge`` or ``:prompt``)
2704 7. default tool for fallback (``:merge`` or ``:prompt``)
2704
2705
2705 This command writes out examination result in the style below::
2706 This command writes out examination result in the style below::
2706
2707
2707 FILE = MERGETOOL
2708 FILE = MERGETOOL
2708
2709
2709 By default, all files known in the first parent context of the
2710 By default, all files known in the first parent context of the
2710 working directory are examined. Use file patterns and/or -I/-X
2711 working directory are examined. Use file patterns and/or -I/-X
2711 options to limit target files. -r/--rev is also useful to examine
2712 options to limit target files. -r/--rev is also useful to examine
2712 files in another context without actual updating to it.
2713 files in another context without actual updating to it.
2713
2714
2714 With --debug, this command shows warning messages while matching
2715 With --debug, this command shows warning messages while matching
2715 against ``merge-patterns`` and so on, too. It is recommended to
2716 against ``merge-patterns`` and so on, too. It is recommended to
2716 use this option with explicit file patterns and/or -I/-X options,
2717 use this option with explicit file patterns and/or -I/-X options,
2717 because this option increases amount of output per file according
2718 because this option increases amount of output per file according
2718 to configurations in hgrc.
2719 to configurations in hgrc.
2719
2720
2720 With -v/--verbose, this command shows configurations below at
2721 With -v/--verbose, this command shows configurations below at
2721 first (only if specified).
2722 first (only if specified).
2722
2723
2723 - ``--tool`` option
2724 - ``--tool`` option
2724 - ``HGMERGE`` environment variable
2725 - ``HGMERGE`` environment variable
2725 - configuration of ``ui.merge``
2726 - configuration of ``ui.merge``
2726
2727
2727 If merge tool is chosen before matching against
2728 If merge tool is chosen before matching against
2728 ``merge-patterns``, this command can't show any helpful
2729 ``merge-patterns``, this command can't show any helpful
2729 information, even with --debug. In such case, information above is
2730 information, even with --debug. In such case, information above is
2730 useful to know why a merge tool is chosen.
2731 useful to know why a merge tool is chosen.
2731 """
2732 """
2732 opts = pycompat.byteskwargs(opts)
2733 opts = pycompat.byteskwargs(opts)
2733 overrides = {}
2734 overrides = {}
2734 if opts[b'tool']:
2735 if opts[b'tool']:
2735 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2736 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2736 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2737 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2737
2738
2738 with ui.configoverride(overrides, b'debugmergepatterns'):
2739 with ui.configoverride(overrides, b'debugmergepatterns'):
2739 hgmerge = encoding.environ.get(b"HGMERGE")
2740 hgmerge = encoding.environ.get(b"HGMERGE")
2740 if hgmerge is not None:
2741 if hgmerge is not None:
2741 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2742 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2742 uimerge = ui.config(b"ui", b"merge")
2743 uimerge = ui.config(b"ui", b"merge")
2743 if uimerge:
2744 if uimerge:
2744 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2745 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2745
2746
2746 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2747 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2747 m = scmutil.match(ctx, pats, opts)
2748 m = scmutil.match(ctx, pats, opts)
2748 changedelete = opts[b'changedelete']
2749 changedelete = opts[b'changedelete']
2749 for path in ctx.walk(m):
2750 for path in ctx.walk(m):
2750 fctx = ctx[path]
2751 fctx = ctx[path]
2751 try:
2752 try:
2752 if not ui.debugflag:
2753 if not ui.debugflag:
2753 ui.pushbuffer(error=True)
2754 ui.pushbuffer(error=True)
2754 tool, toolpath = filemerge._picktool(
2755 tool, toolpath = filemerge._picktool(
2755 repo,
2756 repo,
2756 ui,
2757 ui,
2757 path,
2758 path,
2758 fctx.isbinary(),
2759 fctx.isbinary(),
2759 b'l' in fctx.flags(),
2760 b'l' in fctx.flags(),
2760 changedelete,
2761 changedelete,
2761 )
2762 )
2762 finally:
2763 finally:
2763 if not ui.debugflag:
2764 if not ui.debugflag:
2764 ui.popbuffer()
2765 ui.popbuffer()
2765 ui.write(b'%s = %s\n' % (path, tool))
2766 ui.write(b'%s = %s\n' % (path, tool))
2766
2767
2767
2768
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    # Open a peer for the target repository; always close it, even on error.
    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair of the namespace,
            # escaped so binary values stay printable.
            for key, value in sorted(
                pycompat.iteritems(peer.listkeys(namespace))
            ):
                line = b"%s\t%s\n" % (
                    stringutil.escapestr(key),
                    stringutil.escapestr(value),
                )
                ui.write(line)
            return None
        # Update mode: attempt the old -> new transition for the key and
        # report the peer's boolean result (exit status 0 on success).
        key, old, new = keyinfo
        request = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            outcome = executor.callcommand(b'pushkey', request).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        return not outcome
    finally:
        peer.close()
2803
2804
2804
2805
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Resolve both revisions and build their pvec representations.
    ctx_a = scmutil.revsingle(repo, a)
    ctx_b = scmutil.revsingle(repo, b)
    vec_a = pvec.ctxpvec(ctx_a)
    vec_b = pvec.ctxpvec(ctx_b)
    # Classify how the two vectors relate: equal, ancestor/descendant,
    # or unrelated.  NOTE(review): if none of these comparisons holds,
    # `rel` stays unbound and the final write raises UnboundLocalError --
    # presumably the pvec comparison operators are exhaustive; confirm.
    if vec_a == vec_b:
        rel = b"="
    elif vec_a > vec_b:
        rel = b">"
    elif vec_a < vec_b:
        rel = b"<"
    elif vec_a | vec_b:
        rel = b"|"
    ui.write(_(b"a: %s\n") % vec_a)
    ui.write(_(b"b: %s\n") % vec_b)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (vec_a._depth, vec_b._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(vec_a._depth - vec_b._depth),
            pvec._hamming(vec_a._vec, vec_b._vec),
            vec_a.distance(vec_b),
            rel,
        )
    )
2831
2832
2832
2833
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        manifest = ctx.manifest()
        # A file list of None tells rebuild() to reset everything.
        changedfiles = None
        if opts.get('minimal'):
            # --minimal: restrict the rebuild to files whose tracked state
            # disagrees between the dirstate and the target manifest
            # (see the command docstring above).
            inmanifest = set(manifest.keys())
            indirstate = set(ds)
            onlymanifest = inmanifest - indirstate
            onlydirstate = indirstate - inmanifest
            # Keep scheduled adds untouched; everything else dirstate-only
            # is considered inconsistent.
            notadded = {f for f in onlydirstate if ds[f] != b'a'}
            changedfiles = onlymanifest | notadded

        ds.rebuild(ctx.node(), manifest, changedfiles)
2880
2881
2881
2882
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual fncache reconstruction logic lives in
    # the repair module.
    repair.rebuildfncache(ui, repo)
2886
2887
2887
2888
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Walk every matched file in the chosen revision and report whether
    # its filelog records a rename source for this file revision.
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if renamed:
            src, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (display, src, hex(srcnode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % display)
2907
2908
2908
2909
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable, diffable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2914
2915
2915
2916
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump mode: print one raw index row per revision and return early,
    # skipping all of the aggregate statistics below.
    if opts.get(b"dump"):
        numrevs = len(r)
        # NOTE(review): consecutive spaces inside these literals may have
        # been collapsed by whitespace-mangling in this copy of the file;
        # compare column alignment against upstream before relying on it.
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # A delta parent of -1 means the revision is stored whole;
                # treat the revision itself as its delta base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running set of head revisions: parents of the
            # current rev stop being heads, the rev itself becomes one.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode the revlog header flags first.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each is a [min, max, total] triple whose
    # total slot is later divided in place to become an average.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l` in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, classifying each one and feeding
    # the accumulators declared above.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: stored as full text (a depth-0 snapshot),
            # starting a new chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the chain of the delta parent.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot: a delta whose base is itself a
                # snapshot, tallied per depth.
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify the base (prev / p1 / p2 / other).
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored segment identifies the compression
            # engine used for this chunk.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive counts and turn each accumulator's total slot into an average
    # (guarding every division against an empty category).
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates sized to the widest value they must print.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format padded to the width of `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "count (percent)" format padded to the width of `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    # Report section: header, revision counts, sizes, chunk types, chains.
    # NOTE(review): label padding in the literals below may have been
    # collapsed by whitespace-mangling in this copy; verify alignment
    # against upstream.
    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label: literal for 'empty', hex plus the
        # character for printable ASCII letters, bare hex otherwise.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # Size triples are only meaningful for revlog formats > 0 (see the
    # `if format > 0` guard in the collection loop above).
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    # Delta-base breakdown: against prev (split by whether prev is also a
    # parent) and, with generaldelta, against p1/p2/other directly.
    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3270
3271
3271
3272
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full node hashes in debug mode, abbreviated ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # print the column header matching the chosen format/verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents when lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3385
3386
3386
3387
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the parsing pipeline: each stage transforms the tree from the previous
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree at each requested stage; a stage in
    # showchanged is printed only when it actually altered the tree
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3518
3519
3519
3520
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # the two log destinations are mutually exclusive
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3568
3569
3569
3570
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # resolve both revisions to nodes; rev2 defaults to the null revision
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # only the dirstate parents are rewritten, under the working-copy lock
    with repo.wlock():
        repo.setparents(node1, node2)
3597
3598
3598
3599
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the positional argument is the revision, not a file
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap filelog-style objects down to the underlying revlog
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3625
3626
3626
3627
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # verification is intentionally disabled here — we only want the peer's
    # raw certificate to feed the Windows chain-building API.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3698
3699
3699
3700
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # newest backups first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` non-filtered changesets from the bundle
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # target changeset recovered: stop scanning backups
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3843
3844
3844
3845
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state of a changeset: for every subrepo path,
    # print its source URL and the pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3856
3857
3857
3858
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter namespace with the objects a debugging session
    # is most likely to need.
    local_ns = {'ui': ui, 'repo': repo}
    code.interact(local=local_ns)
3873
3874
3874
3875
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls so repeated computation is
    # amortized over the whole revision range.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # Each node of the set is preceded by a single space,
                # including the first one.
                for node in succsset:
                    ui.write(b' ')
                    ui.write(short(node))
            ui.write(b'\n')
3929
3930
3930
3931
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Three cases: a truthy cached fnode (possibly pointing at a node
        # the .hgtags filelog no longer knows), an absent entry (None), or
        # anything else (e.g. an empty value), which is invalid.
        if fnode:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3949
3950
3950
3951
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into the template property dict.
    props = {}
    for spec in opts['define']:
        try:
            key, value = (part.strip() for part in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def _showsymbols(tobj):
        # Verbose-mode dump of the keywords/functions the template uses.
        kwds, funcs = tobj.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            _showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4014
4015
4015
4016
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    answer = ui.getpass(prompt)
    # ui.getpass() may return None; make that case visible in the output.
    if answer is None:
        answer = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % answer)
4030
4031
4031
4032
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() collected, for interactive-IO testing.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4044
4045
4045
4046
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both locks so cache files are rewritten consistently, then ask
    # for every known cache explicitly via the `caches` argument (the old
    # `full=True` spelling of updatecaches() is superseded by this set).
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4051
4052
4052
4053
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating all the
    # actual work to the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4102
4103
4103
4104
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Display paths with '/' separators when ui.slash is set on platforms
    # whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = lambda fn: util.normpath(fn)
    else:
        display = lambda fn: fn
    # Size the two path columns to the longest entries so output aligns.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in matched),
        max(len(repo.pathto(path)) for path in matched),
    )
    for path in matched:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            b'exact' if matcher.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4130
4131
4131
4132
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = b''
        if entry.get(b'divergentnodes'):
            formatted = [
                b'%s (%s)' % (c.hex(), c.phasestr())
                for c in entry[b'divergentnodes']
            ]
            # Trailing space separates the node list from the reason text.
            divergent = b' '.join(formatted) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (
                entry[b'instability'],
                divergent,
                entry[b'reason'],
                entry[b'node'],
            )
        )
4149
4150
4150
4151
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire-protocol argument passing of a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote options were only needed to build the peer; strip them
        # and forward the remaining non-empty options as command arguments.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {k: v for k, v in pycompat.iteritems(opts) if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4181
4182
4182
4183
4183 def _parsewirelangblocks(fh):
4184 def _parsewirelangblocks(fh):
4184 activeaction = None
4185 activeaction = None
4185 blocklines = []
4186 blocklines = []
4186 lastindent = 0
4187 lastindent = 0
4187
4188
4188 for line in fh:
4189 for line in fh:
4189 line = line.rstrip()
4190 line = line.rstrip()
4190 if not line:
4191 if not line:
4191 continue
4192 continue
4192
4193
4193 if line.startswith(b'#'):
4194 if line.startswith(b'#'):
4194 continue
4195 continue
4195
4196
4196 if not line.startswith(b' '):
4197 if not line.startswith(b' '):
4197 # New block. Flush previous one.
4198 # New block. Flush previous one.
4198 if activeaction:
4199 if activeaction:
4199 yield activeaction, blocklines
4200 yield activeaction, blocklines
4200
4201
4201 activeaction = line
4202 activeaction = line
4202 blocklines = []
4203 blocklines = []
4203 lastindent = 0
4204 lastindent = 0
4204 continue
4205 continue
4205
4206
4206 # Else we start with an indent.
4207 # Else we start with an indent.
4207
4208
4208 if not activeaction:
4209 if not activeaction:
4209 raise error.Abort(_(b'indented line outside of block'))
4210 raise error.Abort(_(b'indented line outside of block'))
4210
4211
4211 indent = len(line) - len(line.lstrip())
4212 indent = len(line) - len(line.lstrip())
4212
4213
4213 # If this line is indented more than the last line, concatenate it.
4214 # If this line is indented more than the last line, concatenate it.
4214 if indent > lastindent and blocklines:
4215 if indent > lastindent and blocklines:
4215 blocklines[-1] += line.lstrip()
4216 blocklines[-1] += line.lstrip()
4216 else:
4217 else:
4217 blocklines.append(line)
4218 blocklines.append(line)
4218 lastindent = indent
4219 lastindent = indent
4219
4220
4220 # Flush last block.
4221 # Flush last block.
4221 if activeaction:
4222 if activeaction:
4222 yield activeaction, blocklines
4223 yield activeaction, blocklines
4223
4224
4224
4225
4225 @command(
4226 @command(
4226 b'debugwireproto',
4227 b'debugwireproto',
4227 [
4228 [
4228 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 (
4231 (
4231 b'',
4232 b'',
4232 b'noreadstderr',
4233 b'noreadstderr',
4233 False,
4234 False,
4234 _(b'do not read from stderr of the remote'),
4235 _(b'do not read from stderr of the remote'),
4235 ),
4236 ),
4236 (
4237 (
4237 b'',
4238 b'',
4238 b'nologhandshake',
4239 b'nologhandshake',
4239 False,
4240 False,
4240 _(b'do not log I/O related to the peer handshake'),
4241 _(b'do not log I/O related to the peer handshake'),
4241 ),
4242 ),
4242 ]
4243 ]
4243 + cmdutil.remoteopts,
4244 + cmdutil.remoteopts,
4244 _(b'[PATH]'),
4245 _(b'[PATH]'),
4245 optionalrepo=True,
4246 optionalrepo=True,
4246 )
4247 )
4247 def debugwireproto(ui, repo, path=None, **opts):
4248 def debugwireproto(ui, repo, path=None, **opts):
4248 """send wire protocol commands to a server
4249 """send wire protocol commands to a server
4249
4250
4250 This command can be used to issue wire protocol commands to remote
4251 This command can be used to issue wire protocol commands to remote
4251 peers and to debug the raw data being exchanged.
4252 peers and to debug the raw data being exchanged.
4252
4253
4253 ``--localssh`` will start an SSH server against the current repository
4254 ``--localssh`` will start an SSH server against the current repository
4254 and connect to that. By default, the connection will perform a handshake
4255 and connect to that. By default, the connection will perform a handshake
4255 and establish an appropriate peer instance.
4256 and establish an appropriate peer instance.
4256
4257
4257 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 peer instance using the specified class type. Valid values are ``raw``,
4259 peer instance using the specified class type. Valid values are ``raw``,
4259 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 raw data payloads and don't support higher-level command actions.
4261 raw data payloads and don't support higher-level command actions.
4261
4262
4262 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 of the peer (for SSH connections only). Disabling automatic reading of
4264 of the peer (for SSH connections only). Disabling automatic reading of
4264 stderr is useful for making output more deterministic.
4265 stderr is useful for making output more deterministic.
4265
4266
4266 Commands are issued via a mini language which is specified via stdin.
4267 Commands are issued via a mini language which is specified via stdin.
4267 The language consists of individual actions to perform. An action is
4268 The language consists of individual actions to perform. An action is
4268 defined by a block. A block is defined as a line with no leading
4269 defined by a block. A block is defined as a line with no leading
4269 space followed by 0 or more lines with leading space. Blocks are
4270 space followed by 0 or more lines with leading space. Blocks are
4270 effectively a high-level command with additional metadata.
4271 effectively a high-level command with additional metadata.
4271
4272
4272 Lines beginning with ``#`` are ignored.
4273 Lines beginning with ``#`` are ignored.
4273
4274
4274 The following sections denote available actions.
4275 The following sections denote available actions.
4275
4276
4276 raw
4277 raw
4277 ---
4278 ---
4278
4279
4279 Send raw data to the server.
4280 Send raw data to the server.
4280
4281
4281 The block payload contains the raw data to send as one atomic send
4282 The block payload contains the raw data to send as one atomic send
4282 operation. The data may not actually be delivered in a single system
4283 operation. The data may not actually be delivered in a single system
4283 call: it depends on the abilities of the transport being used.
4284 call: it depends on the abilities of the transport being used.
4284
4285
4285 Each line in the block is de-indented and concatenated. Then, that
4286 Each line in the block is de-indented and concatenated. Then, that
4286 value is evaluated as a Python b'' literal. This allows the use of
4287 value is evaluated as a Python b'' literal. This allows the use of
4287 backslash escaping, etc.
4288 backslash escaping, etc.
4288
4289
4289 raw+
4290 raw+
4290 ----
4291 ----
4291
4292
4292 Behaves like ``raw`` except flushes output afterwards.
4293 Behaves like ``raw`` except flushes output afterwards.
4293
4294
4294 command <X>
4295 command <X>
4295 -----------
4296 -----------
4296
4297
4297 Send a request to run a named command, whose name follows the ``command``
4298 Send a request to run a named command, whose name follows the ``command``
4298 string.
4299 string.
4299
4300
4300 Arguments to the command are defined as lines in this block. The format of
4301 Arguments to the command are defined as lines in this block. The format of
4301 each line is ``<key> <value>``. e.g.::
4302 each line is ``<key> <value>``. e.g.::
4302
4303
4303 command listkeys
4304 command listkeys
4304 namespace bookmarks
4305 namespace bookmarks
4305
4306
4306 If the value begins with ``eval:``, it will be interpreted as a Python
4307 If the value begins with ``eval:``, it will be interpreted as a Python
4307 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 This allows sending complex types and encoding special byte sequences via
4309 This allows sending complex types and encoding special byte sequences via
4309 backslash escaping.
4310 backslash escaping.
4310
4311
4311 The following arguments have special meaning:
4312 The following arguments have special meaning:
4312
4313
4313 ``PUSHFILE``
4314 ``PUSHFILE``
4314 When defined, the *push* mechanism of the peer will be used instead
4315 When defined, the *push* mechanism of the peer will be used instead
4315 of the static request-response mechanism and the content of the
4316 of the static request-response mechanism and the content of the
4316 file specified in the value of this argument will be sent as the
4317 file specified in the value of this argument will be sent as the
4317 command payload.
4318 command payload.
4318
4319
4319 This can be used to submit a local bundle file to the remote.
4320 This can be used to submit a local bundle file to the remote.
4320
4321
4321 batchbegin
4322 batchbegin
4322 ----------
4323 ----------
4323
4324
4324 Instruct the peer to begin a batched send.
4325 Instruct the peer to begin a batched send.
4325
4326
4326 All ``command`` blocks are queued for execution until the next
4327 All ``command`` blocks are queued for execution until the next
4327 ``batchsubmit`` block.
4328 ``batchsubmit`` block.
4328
4329
4329 batchsubmit
4330 batchsubmit
4330 -----------
4331 -----------
4331
4332
4332 Submit previously queued ``command`` blocks as a batch request.
4333 Submit previously queued ``command`` blocks as a batch request.
4333
4334
4334 This action MUST be paired with a ``batchbegin`` action.
4335 This action MUST be paired with a ``batchbegin`` action.
4335
4336
4336 httprequest <method> <path>
4337 httprequest <method> <path>
4337 ---------------------------
4338 ---------------------------
4338
4339
4339 (HTTP peer only)
4340 (HTTP peer only)
4340
4341
4341 Send an HTTP request to the peer.
4342 Send an HTTP request to the peer.
4342
4343
4343 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344
4345
4345 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 headers to add to the request. e.g. ``Accept: foo``.
4347 headers to add to the request. e.g. ``Accept: foo``.
4347
4348
4348 The following arguments are special:
4349 The following arguments are special:
4349
4350
4350 ``BODYFILE``
4351 ``BODYFILE``
4351 The content of the file defined as the value to this argument will be
4352 The content of the file defined as the value to this argument will be
4352 transferred verbatim as the HTTP request body.
4353 transferred verbatim as the HTTP request body.
4353
4354
4354 ``frame <type> <flags> <payload>``
4355 ``frame <type> <flags> <payload>``
4355 Send a unified protocol frame as part of the request body.
4356 Send a unified protocol frame as part of the request body.
4356
4357
4357 All frames will be collected and sent as the body to the HTTP
4358 All frames will be collected and sent as the body to the HTTP
4358 request.
4359 request.
4359
4360
4360 close
4361 close
4361 -----
4362 -----
4362
4363
4363 Close the connection to the server.
4364 Close the connection to the server.
4364
4365
4365 flush
4366 flush
4366 -----
4367 -----
4367
4368
4368 Flush data written to the server.
4369 Flush data written to the server.
4369
4370
4370 readavailable
4371 readavailable
4371 -------------
4372 -------------
4372
4373
4373 Close the write end of the connection and read all available data from
4374 Close the write end of the connection and read all available data from
4374 the server.
4375 the server.
4375
4376
4376 If the connection to the server encompasses multiple pipes, we poll both
4377 If the connection to the server encompasses multiple pipes, we poll both
4377 pipes and read available data.
4378 pipes and read available data.
4378
4379
4379 readline
4380 readline
4380 --------
4381 --------
4381
4382
4382 Read a line of output from the server. If there are multiple output
4383 Read a line of output from the server. If there are multiple output
4383 pipes, reads only the main pipe.
4384 pipes, reads only the main pipe.
4384
4385
4385 ereadline
4386 ereadline
4386 ---------
4387 ---------
4387
4388
4388 Like ``readline``, but read from the stderr pipe, if available.
4389 Like ``readline``, but read from the stderr pipe, if available.
4389
4390
4390 read <X>
4391 read <X>
4391 --------
4392 --------
4392
4393
4393 ``read()`` N bytes from the server's main output pipe.
4394 ``read()`` N bytes from the server's main output pipe.
4394
4395
4395 eread <X>
4396 eread <X>
4396 ---------
4397 ---------
4397
4398
4398 ``read()`` N bytes from the server's stderr pipe, if available.
4399 ``read()`` N bytes from the server's stderr pipe, if available.
4399
4400
4400 Specifying Unified Frame-Based Protocol Frames
4401 Specifying Unified Frame-Based Protocol Frames
4401 ----------------------------------------------
4402 ----------------------------------------------
4402
4403
4403 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 syntax.
4405 syntax.
4405
4406
4406 A frame is composed as a type, flags, and payload. These can be parsed
4407 A frame is composed as a type, flags, and payload. These can be parsed
4407 from a string of the form:
4408 from a string of the form:
4408
4409
4409 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410
4411
4411 ``request-id`` and ``stream-id`` are integers defining the request and
4412 ``request-id`` and ``stream-id`` are integers defining the request and
4412 stream identifiers.
4413 stream identifiers.
4413
4414
4414 ``type`` can be an integer value for the frame type or the string name
4415 ``type`` can be an integer value for the frame type or the string name
4415 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 ``command-name``.
4417 ``command-name``.
4417
4418
4418 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 components. Each component (and there can be just one) can be an integer
4420 components. Each component (and there can be just one) can be an integer
4420 or a flag name for stream flags or frame flags, respectively. Values are
4421 or a flag name for stream flags or frame flags, respectively. Values are
4421 resolved to integers and then bitwise OR'd together.
4422 resolved to integers and then bitwise OR'd together.
4422
4423
4423 ``payload`` represents the raw frame payload. If it begins with
4424 ``payload`` represents the raw frame payload. If it begins with
4424 ``cbor:``, the following string is evaluated as Python code and the
4425 ``cbor:``, the following string is evaluated as Python code and the
4425 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 as a Python byte string literal.
4427 as a Python byte string literal.
4427 """
4428 """
4428 opts = pycompat.byteskwargs(opts)
4429 opts = pycompat.byteskwargs(opts)
4429
4430
4430 if opts[b'localssh'] and not repo:
4431 if opts[b'localssh'] and not repo:
4431 raise error.Abort(_(b'--localssh requires a repository'))
4432 raise error.Abort(_(b'--localssh requires a repository'))
4432
4433
4433 if opts[b'peer'] and opts[b'peer'] not in (
4434 if opts[b'peer'] and opts[b'peer'] not in (
4434 b'raw',
4435 b'raw',
4435 b'http2',
4436 b'http2',
4436 b'ssh1',
4437 b'ssh1',
4437 b'ssh2',
4438 b'ssh2',
4438 ):
4439 ):
4439 raise error.Abort(
4440 raise error.Abort(
4440 _(b'invalid value for --peer'),
4441 _(b'invalid value for --peer'),
4441 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4442 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4442 )
4443 )
4443
4444
4444 if path and opts[b'localssh']:
4445 if path and opts[b'localssh']:
4445 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446
4447
4447 if ui.interactive():
4448 if ui.interactive():
4448 ui.write(_(b'(waiting for commands on stdin)\n'))
4449 ui.write(_(b'(waiting for commands on stdin)\n'))
4449
4450
4450 blocks = list(_parsewirelangblocks(ui.fin))
4451 blocks = list(_parsewirelangblocks(ui.fin))
4451
4452
4452 proc = None
4453 proc = None
4453 stdin = None
4454 stdin = None
4454 stdout = None
4455 stdout = None
4455 stderr = None
4456 stderr = None
4456 opener = None
4457 opener = None
4457
4458
4458 if opts[b'localssh']:
4459 if opts[b'localssh']:
4459 # We start the SSH server in its own process so there is process
4460 # We start the SSH server in its own process so there is process
4460 # separation. This prevents a whole class of potential bugs around
4461 # separation. This prevents a whole class of potential bugs around
4461 # shared state from interfering with server operation.
4462 # shared state from interfering with server operation.
4462 args = procutil.hgcmd() + [
4463 args = procutil.hgcmd() + [
4463 b'-R',
4464 b'-R',
4464 repo.root,
4465 repo.root,
4465 b'debugserve',
4466 b'debugserve',
4466 b'--sshstdio',
4467 b'--sshstdio',
4467 ]
4468 ]
4468 proc = subprocess.Popen(
4469 proc = subprocess.Popen(
4469 pycompat.rapply(procutil.tonativestr, args),
4470 pycompat.rapply(procutil.tonativestr, args),
4470 stdin=subprocess.PIPE,
4471 stdin=subprocess.PIPE,
4471 stdout=subprocess.PIPE,
4472 stdout=subprocess.PIPE,
4472 stderr=subprocess.PIPE,
4473 stderr=subprocess.PIPE,
4473 bufsize=0,
4474 bufsize=0,
4474 )
4475 )
4475
4476
4476 stdin = proc.stdin
4477 stdin = proc.stdin
4477 stdout = proc.stdout
4478 stdout = proc.stdout
4478 stderr = proc.stderr
4479 stderr = proc.stderr
4479
4480
4480 # We turn the pipes into observers so we can log I/O.
4481 # We turn the pipes into observers so we can log I/O.
4481 if ui.verbose or opts[b'peer'] == b'raw':
4482 if ui.verbose or opts[b'peer'] == b'raw':
4482 stdin = util.makeloggingfileobject(
4483 stdin = util.makeloggingfileobject(
4483 ui, proc.stdin, b'i', logdata=True
4484 ui, proc.stdin, b'i', logdata=True
4484 )
4485 )
4485 stdout = util.makeloggingfileobject(
4486 stdout = util.makeloggingfileobject(
4486 ui, proc.stdout, b'o', logdata=True
4487 ui, proc.stdout, b'o', logdata=True
4487 )
4488 )
4488 stderr = util.makeloggingfileobject(
4489 stderr = util.makeloggingfileobject(
4489 ui, proc.stderr, b'e', logdata=True
4490 ui, proc.stderr, b'e', logdata=True
4490 )
4491 )
4491
4492
4492 # --localssh also implies the peer connection settings.
4493 # --localssh also implies the peer connection settings.
4493
4494
4494 url = b'ssh://localserver'
4495 url = b'ssh://localserver'
4495 autoreadstderr = not opts[b'noreadstderr']
4496 autoreadstderr = not opts[b'noreadstderr']
4496
4497
4497 if opts[b'peer'] == b'ssh1':
4498 if opts[b'peer'] == b'ssh1':
4498 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 peer = sshpeer.sshv1peer(
4500 peer = sshpeer.sshv1peer(
4500 ui,
4501 ui,
4501 url,
4502 url,
4502 proc,
4503 proc,
4503 stdin,
4504 stdin,
4504 stdout,
4505 stdout,
4505 stderr,
4506 stderr,
4506 None,
4507 None,
4507 autoreadstderr=autoreadstderr,
4508 autoreadstderr=autoreadstderr,
4508 )
4509 )
4509 elif opts[b'peer'] == b'ssh2':
4510 elif opts[b'peer'] == b'ssh2':
4510 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 peer = sshpeer.sshv2peer(
4512 peer = sshpeer.sshv2peer(
4512 ui,
4513 ui,
4513 url,
4514 url,
4514 proc,
4515 proc,
4515 stdin,
4516 stdin,
4516 stdout,
4517 stdout,
4517 stderr,
4518 stderr,
4518 None,
4519 None,
4519 autoreadstderr=autoreadstderr,
4520 autoreadstderr=autoreadstderr,
4520 )
4521 )
4521 elif opts[b'peer'] == b'raw':
4522 elif opts[b'peer'] == b'raw':
4522 ui.write(_(b'using raw connection to peer\n'))
4523 ui.write(_(b'using raw connection to peer\n'))
4523 peer = None
4524 peer = None
4524 else:
4525 else:
4525 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 peer = sshpeer.makepeer(
4527 peer = sshpeer.makepeer(
4527 ui,
4528 ui,
4528 url,
4529 url,
4529 proc,
4530 proc,
4530 stdin,
4531 stdin,
4531 stdout,
4532 stdout,
4532 stderr,
4533 stderr,
4533 autoreadstderr=autoreadstderr,
4534 autoreadstderr=autoreadstderr,
4534 )
4535 )
4535
4536
4536 elif path:
4537 elif path:
4537 # We bypass hg.peer() so we can proxy the sockets.
4538 # We bypass hg.peer() so we can proxy the sockets.
4538 # TODO consider not doing this because we skip
4539 # TODO consider not doing this because we skip
4539 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 u = urlutil.url(path)
4541 u = urlutil.url(path)
4541 if u.scheme != b'http':
4542 if u.scheme != b'http':
4542 raise error.Abort(_(b'only http:// paths are currently supported'))
4543 raise error.Abort(_(b'only http:// paths are currently supported'))
4543
4544
4544 url, authinfo = u.authinfo()
4545 url, authinfo = u.authinfo()
4545 openerargs = {
4546 openerargs = {
4546 'useragent': b'Mercurial debugwireproto',
4547 'useragent': b'Mercurial debugwireproto',
4547 }
4548 }
4548
4549
4549 # Turn pipes/sockets into observers so we can log I/O.
4550 # Turn pipes/sockets into observers so we can log I/O.
4550 if ui.verbose:
4551 if ui.verbose:
4551 openerargs.update(
4552 openerargs.update(
4552 {
4553 {
4553 'loggingfh': ui,
4554 'loggingfh': ui,
4554 'loggingname': b's',
4555 'loggingname': b's',
4555 'loggingopts': {
4556 'loggingopts': {
4556 'logdata': True,
4557 'logdata': True,
4557 'logdataapis': False,
4558 'logdataapis': False,
4558 },
4559 },
4559 }
4560 }
4560 )
4561 )
4561
4562
4562 if ui.debugflag:
4563 if ui.debugflag:
4563 openerargs['loggingopts']['logdataapis'] = True
4564 openerargs['loggingopts']['logdataapis'] = True
4564
4565
4565 # Don't send default headers when in raw mode. This allows us to
4566 # Don't send default headers when in raw mode. This allows us to
4566 # bypass most of the behavior of our URL handling code so we can
4567 # bypass most of the behavior of our URL handling code so we can
4567 # have near complete control over what's sent on the wire.
4568 # have near complete control over what's sent on the wire.
4568 if opts[b'peer'] == b'raw':
4569 if opts[b'peer'] == b'raw':
4569 openerargs['sendaccept'] = False
4570 openerargs['sendaccept'] = False
4570
4571
4571 opener = urlmod.opener(ui, authinfo, **openerargs)
4572 opener = urlmod.opener(ui, authinfo, **openerargs)
4572
4573
4573 if opts[b'peer'] == b'http2':
4574 if opts[b'peer'] == b'http2':
4574 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 # We go through makepeer() because we need an API descriptor for
4576 # We go through makepeer() because we need an API descriptor for
4576 # the peer instance to be useful.
4577 # the peer instance to be useful.
4577 with ui.configoverride(
4578 with ui.configoverride(
4578 {(b'experimental', b'httppeer.advertise-v2'): True}
4579 {(b'experimental', b'httppeer.advertise-v2'): True}
4579 ):
4580 ):
4580 if opts[b'nologhandshake']:
4581 if opts[b'nologhandshake']:
4581 ui.pushbuffer()
4582 ui.pushbuffer()
4582
4583
4583 peer = httppeer.makepeer(ui, path, opener=opener)
4584 peer = httppeer.makepeer(ui, path, opener=opener)
4584
4585
4585 if opts[b'nologhandshake']:
4586 if opts[b'nologhandshake']:
4586 ui.popbuffer()
4587 ui.popbuffer()
4587
4588
4588 if not isinstance(peer, httppeer.httpv2peer):
4589 if not isinstance(peer, httppeer.httpv2peer):
4589 raise error.Abort(
4590 raise error.Abort(
4590 _(
4591 _(
4591 b'could not instantiate HTTP peer for '
4592 b'could not instantiate HTTP peer for '
4592 b'wire protocol version 2'
4593 b'wire protocol version 2'
4593 ),
4594 ),
4594 hint=_(
4595 hint=_(
4595 b'the server may not have the feature '
4596 b'the server may not have the feature '
4596 b'enabled or is not allowing this '
4597 b'enabled or is not allowing this '
4597 b'client version'
4598 b'client version'
4598 ),
4599 ),
4599 )
4600 )
4600
4601
4601 elif opts[b'peer'] == b'raw':
4602 elif opts[b'peer'] == b'raw':
4602 ui.write(_(b'using raw connection to peer\n'))
4603 ui.write(_(b'using raw connection to peer\n'))
4603 peer = None
4604 peer = None
4604 elif opts[b'peer']:
4605 elif opts[b'peer']:
4605 raise error.Abort(
4606 raise error.Abort(
4606 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4607 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4607 )
4608 )
4608 else:
4609 else:
4609 peer = httppeer.makepeer(ui, path, opener=opener)
4610 peer = httppeer.makepeer(ui, path, opener=opener)
4610
4611
4611 # We /could/ populate stdin/stdout with sock.makefile()...
4612 # We /could/ populate stdin/stdout with sock.makefile()...
4612 else:
4613 else:
4613 raise error.Abort(_(b'unsupported connection configuration'))
4614 raise error.Abort(_(b'unsupported connection configuration'))
4614
4615
4615 batchedcommands = None
4616 batchedcommands = None
4616
4617
4617 # Now perform actions based on the parsed wire language instructions.
4618 # Now perform actions based on the parsed wire language instructions.
4618 for action, lines in blocks:
4619 for action, lines in blocks:
4619 if action in (b'raw', b'raw+'):
4620 if action in (b'raw', b'raw+'):
4620 if not stdin:
4621 if not stdin:
4621 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4622 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4622
4623
4623 # Concatenate the data together.
4624 # Concatenate the data together.
4624 data = b''.join(l.lstrip() for l in lines)
4625 data = b''.join(l.lstrip() for l in lines)
4625 data = stringutil.unescapestr(data)
4626 data = stringutil.unescapestr(data)
4626 stdin.write(data)
4627 stdin.write(data)
4627
4628
4628 if action == b'raw+':
4629 if action == b'raw+':
4629 stdin.flush()
4630 stdin.flush()
4630 elif action == b'flush':
4631 elif action == b'flush':
4631 if not stdin:
4632 if not stdin:
4632 raise error.Abort(_(b'cannot call flush on this peer'))
4633 raise error.Abort(_(b'cannot call flush on this peer'))
4633 stdin.flush()
4634 stdin.flush()
4634 elif action.startswith(b'command'):
4635 elif action.startswith(b'command'):
4635 if not peer:
4636 if not peer:
4636 raise error.Abort(
4637 raise error.Abort(
4637 _(
4638 _(
4638 b'cannot send commands unless peer instance '
4639 b'cannot send commands unless peer instance '
4639 b'is available'
4640 b'is available'
4640 )
4641 )
4641 )
4642 )
4642
4643
4643 command = action.split(b' ', 1)[1]
4644 command = action.split(b' ', 1)[1]
4644
4645
4645 args = {}
4646 args = {}
4646 for line in lines:
4647 for line in lines:
4647 # We need to allow empty values.
4648 # We need to allow empty values.
4648 fields = line.lstrip().split(b' ', 1)
4649 fields = line.lstrip().split(b' ', 1)
4649 if len(fields) == 1:
4650 if len(fields) == 1:
4650 key = fields[0]
4651 key = fields[0]
4651 value = b''
4652 value = b''
4652 else:
4653 else:
4653 key, value = fields
4654 key, value = fields
4654
4655
4655 if value.startswith(b'eval:'):
4656 if value.startswith(b'eval:'):
4656 value = stringutil.evalpythonliteral(value[5:])
4657 value = stringutil.evalpythonliteral(value[5:])
4657 else:
4658 else:
4658 value = stringutil.unescapestr(value)
4659 value = stringutil.unescapestr(value)
4659
4660
4660 args[key] = value
4661 args[key] = value
4661
4662
4662 if batchedcommands is not None:
4663 if batchedcommands is not None:
4663 batchedcommands.append((command, args))
4664 batchedcommands.append((command, args))
4664 continue
4665 continue
4665
4666
4666 ui.status(_(b'sending %s command\n') % command)
4667 ui.status(_(b'sending %s command\n') % command)
4667
4668
4668 if b'PUSHFILE' in args:
4669 if b'PUSHFILE' in args:
4669 with open(args[b'PUSHFILE'], 'rb') as fh:
4670 with open(args[b'PUSHFILE'], 'rb') as fh:
4670 del args[b'PUSHFILE']
4671 del args[b'PUSHFILE']
4671 res, output = peer._callpush(
4672 res, output = peer._callpush(
4672 command, fh, **pycompat.strkwargs(args)
4673 command, fh, **pycompat.strkwargs(args)
4673 )
4674 )
4674 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4675 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4675 ui.status(
4676 ui.status(
4676 _(b'remote output: %s\n') % stringutil.escapestr(output)
4677 _(b'remote output: %s\n') % stringutil.escapestr(output)
4677 )
4678 )
4678 else:
4679 else:
4679 with peer.commandexecutor() as e:
4680 with peer.commandexecutor() as e:
4680 res = e.callcommand(command, args).result()
4681 res = e.callcommand(command, args).result()
4681
4682
4682 if isinstance(res, wireprotov2peer.commandresponse):
4683 if isinstance(res, wireprotov2peer.commandresponse):
4683 val = res.objects()
4684 val = res.objects()
4684 ui.status(
4685 ui.status(
4685 _(b'response: %s\n')
4686 _(b'response: %s\n')
4686 % stringutil.pprint(val, bprefix=True, indent=2)
4687 % stringutil.pprint(val, bprefix=True, indent=2)
4687 )
4688 )
4688 else:
4689 else:
4689 ui.status(
4690 ui.status(
4690 _(b'response: %s\n')
4691 _(b'response: %s\n')
4691 % stringutil.pprint(res, bprefix=True, indent=2)
4692 % stringutil.pprint(res, bprefix=True, indent=2)
4692 )
4693 )
4693
4694
4694 elif action == b'batchbegin':
4695 elif action == b'batchbegin':
4695 if batchedcommands is not None:
4696 if batchedcommands is not None:
4696 raise error.Abort(_(b'nested batchbegin not allowed'))
4697 raise error.Abort(_(b'nested batchbegin not allowed'))
4697
4698
4698 batchedcommands = []
4699 batchedcommands = []
4699 elif action == b'batchsubmit':
4700 elif action == b'batchsubmit':
4700 # There is a batching API we could go through. But it would be
4701 # There is a batching API we could go through. But it would be
4701 # difficult to normalize requests into function calls. It is easier
4702 # difficult to normalize requests into function calls. It is easier
4702 # to bypass this layer and normalize to commands + args.
4703 # to bypass this layer and normalize to commands + args.
4703 ui.status(
4704 ui.status(
4704 _(b'sending batch with %d sub-commands\n')
4705 _(b'sending batch with %d sub-commands\n')
4705 % len(batchedcommands)
4706 % len(batchedcommands)
4706 )
4707 )
4707 assert peer is not None
4708 assert peer is not None
4708 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4709 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4709 ui.status(
4710 ui.status(
4710 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4711 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4711 )
4712 )
4712
4713
4713 batchedcommands = None
4714 batchedcommands = None
4714
4715
4715 elif action.startswith(b'httprequest '):
4716 elif action.startswith(b'httprequest '):
4716 if not opener:
4717 if not opener:
4717 raise error.Abort(
4718 raise error.Abort(
4718 _(b'cannot use httprequest without an HTTP peer')
4719 _(b'cannot use httprequest without an HTTP peer')
4719 )
4720 )
4720
4721
4721 request = action.split(b' ', 2)
4722 request = action.split(b' ', 2)
4722 if len(request) != 3:
4723 if len(request) != 3:
4723 raise error.Abort(
4724 raise error.Abort(
4724 _(
4725 _(
4725 b'invalid httprequest: expected format is '
4726 b'invalid httprequest: expected format is '
4726 b'"httprequest <method> <path>'
4727 b'"httprequest <method> <path>'
4727 )
4728 )
4728 )
4729 )
4729
4730
4730 method, httppath = request[1:]
4731 method, httppath = request[1:]
4731 headers = {}
4732 headers = {}
4732 body = None
4733 body = None
4733 frames = []
4734 frames = []
4734 for line in lines:
4735 for line in lines:
4735 line = line.lstrip()
4736 line = line.lstrip()
4736 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4737 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4737 if m:
4738 if m:
4738 # Headers need to use native strings.
4739 # Headers need to use native strings.
4739 key = pycompat.strurl(m.group(1))
4740 key = pycompat.strurl(m.group(1))
4740 value = pycompat.strurl(m.group(2))
4741 value = pycompat.strurl(m.group(2))
4741 headers[key] = value
4742 headers[key] = value
4742 continue
4743 continue
4743
4744
4744 if line.startswith(b'BODYFILE '):
4745 if line.startswith(b'BODYFILE '):
4745 with open(line.split(b' ', 1), b'rb') as fh:
4746 with open(line.split(b' ', 1), b'rb') as fh:
4746 body = fh.read()
4747 body = fh.read()
4747 elif line.startswith(b'frame '):
4748 elif line.startswith(b'frame '):
4748 frame = wireprotoframing.makeframefromhumanstring(
4749 frame = wireprotoframing.makeframefromhumanstring(
4749 line[len(b'frame ') :]
4750 line[len(b'frame ') :]
4750 )
4751 )
4751
4752
4752 frames.append(frame)
4753 frames.append(frame)
4753 else:
4754 else:
4754 raise error.Abort(
4755 raise error.Abort(
4755 _(b'unknown argument to httprequest: %s') % line
4756 _(b'unknown argument to httprequest: %s') % line
4756 )
4757 )
4757
4758
4758 url = path + httppath
4759 url = path + httppath
4759
4760
4760 if frames:
4761 if frames:
4761 body = b''.join(bytes(f) for f in frames)
4762 body = b''.join(bytes(f) for f in frames)
4762
4763
4763 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4764 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4764
4765
4765 # urllib.Request insists on using has_data() as a proxy for
4766 # urllib.Request insists on using has_data() as a proxy for
4766 # determining the request method. Override that to use our
4767 # determining the request method. Override that to use our
4767 # explicitly requested method.
4768 # explicitly requested method.
4768 req.get_method = lambda: pycompat.sysstr(method)
4769 req.get_method = lambda: pycompat.sysstr(method)
4769
4770
4770 try:
4771 try:
4771 res = opener.open(req)
4772 res = opener.open(req)
4772 body = res.read()
4773 body = res.read()
4773 except util.urlerr.urlerror as e:
4774 except util.urlerr.urlerror as e:
4774 # read() method must be called, but only exists in Python 2
4775 # read() method must be called, but only exists in Python 2
4775 getattr(e, 'read', lambda: None)()
4776 getattr(e, 'read', lambda: None)()
4776 continue
4777 continue
4777
4778
4778 ct = res.headers.get('Content-Type')
4779 ct = res.headers.get('Content-Type')
4779 if ct == 'application/mercurial-cbor':
4780 if ct == 'application/mercurial-cbor':
4780 ui.write(
4781 ui.write(
4781 _(b'cbor> %s\n')
4782 _(b'cbor> %s\n')
4782 % stringutil.pprint(
4783 % stringutil.pprint(
4783 cborutil.decodeall(body), bprefix=True, indent=2
4784 cborutil.decodeall(body), bprefix=True, indent=2
4784 )
4785 )
4785 )
4786 )
4786
4787
4787 elif action == b'close':
4788 elif action == b'close':
4788 assert peer is not None
4789 assert peer is not None
4789 peer.close()
4790 peer.close()
4790 elif action == b'readavailable':
4791 elif action == b'readavailable':
4791 if not stdout or not stderr:
4792 if not stdout or not stderr:
4792 raise error.Abort(
4793 raise error.Abort(
4793 _(b'readavailable not available on this peer')
4794 _(b'readavailable not available on this peer')
4794 )
4795 )
4795
4796
4796 stdin.close()
4797 stdin.close()
4797 stdout.read()
4798 stdout.read()
4798 stderr.read()
4799 stderr.read()
4799
4800
4800 elif action == b'readline':
4801 elif action == b'readline':
4801 if not stdout:
4802 if not stdout:
4802 raise error.Abort(_(b'readline not available on this peer'))
4803 raise error.Abort(_(b'readline not available on this peer'))
4803 stdout.readline()
4804 stdout.readline()
4804 elif action == b'ereadline':
4805 elif action == b'ereadline':
4805 if not stderr:
4806 if not stderr:
4806 raise error.Abort(_(b'ereadline not available on this peer'))
4807 raise error.Abort(_(b'ereadline not available on this peer'))
4807 stderr.readline()
4808 stderr.readline()
4808 elif action.startswith(b'read '):
4809 elif action.startswith(b'read '):
4809 count = int(action.split(b' ', 1)[1])
4810 count = int(action.split(b' ', 1)[1])
4810 if not stdout:
4811 if not stdout:
4811 raise error.Abort(_(b'read not available on this peer'))
4812 raise error.Abort(_(b'read not available on this peer'))
4812 stdout.read(count)
4813 stdout.read(count)
4813 elif action.startswith(b'eread '):
4814 elif action.startswith(b'eread '):
4814 count = int(action.split(b' ', 1)[1])
4815 count = int(action.split(b' ', 1)[1])
4815 if not stderr:
4816 if not stderr:
4816 raise error.Abort(_(b'eread not available on this peer'))
4817 raise error.Abort(_(b'eread not available on this peer'))
4817 stderr.read(count)
4818 stderr.read(count)
4818 else:
4819 else:
4819 raise error.Abort(_(b'unknown action: %s') % action)
4820 raise error.Abort(_(b'unknown action: %s') % action)
4820
4821
4821 if batchedcommands is not None:
4822 if batchedcommands is not None:
4822 raise error.Abort(_(b'unclosed "batchbegin" request'))
4823 raise error.Abort(_(b'unclosed "batchbegin" request'))
4823
4824
4824 if peer:
4825 if peer:
4825 peer.close()
4826 peer.close()
4826
4827
4827 if proc:
4828 if proc:
4828 proc.kill()
4829 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now