debugdiscovery: document relevant config option...
marmoute
r47560:13d97369 default
@@ -1,4781 +1,4810 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullid,
33 nullid,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 wireprotov2peer,
93 wireprotov2peer,
94 )
94 )
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 )
101 )
102
102
103 from .revlogutils import (
103 from .revlogutils import (
104 deltas as deltautil,
104 deltas as deltautil,
105 nodemap,
105 nodemap,
106 sidedata,
106 sidedata,
107 )
107 )
108
108
109 release = lockmod.release
109 release = lockmod.release
110
110
111 table = {}
111 table = {}
112 table.update(strip.command._table)
112 table.update(strip.command._table)
113 command = registrar.command(table)
113 command = registrar.command(table)
114
114
115
115
116 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
116 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
117 def debugancestor(ui, repo, *args):
117 def debugancestor(ui, repo, *args):
118 """find the ancestor revision of two revisions in a given index"""
118 """find the ancestor revision of two revisions in a given index"""
119 if len(args) == 3:
119 if len(args) == 3:
120 index, rev1, rev2 = args
120 index, rev1, rev2 = args
121 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
121 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
122 lookup = r.lookup
122 lookup = r.lookup
123 elif len(args) == 2:
123 elif len(args) == 2:
124 if not repo:
124 if not repo:
125 raise error.Abort(
125 raise error.Abort(
126 _(b'there is no Mercurial repository here (.hg not found)')
126 _(b'there is no Mercurial repository here (.hg not found)')
127 )
127 )
128 rev1, rev2 = args
128 rev1, rev2 = args
129 r = repo.changelog
129 r = repo.changelog
130 lookup = repo.lookup
130 lookup = repo.lookup
131 else:
131 else:
132 raise error.Abort(_(b'either two or three arguments required'))
132 raise error.Abort(_(b'either two or three arguments required'))
133 a = r.ancestor(lookup(rev1), lookup(rev2))
133 a = r.ancestor(lookup(rev1), lookup(rev2))
134 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
134 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
135
135
136
136
137 @command(b'debugantivirusrunning', [])
137 @command(b'debugantivirusrunning', [])
138 def debugantivirusrunning(ui, repo):
138 def debugantivirusrunning(ui, repo):
139 """attempt to trigger an antivirus scanner to see if one is active"""
139 """attempt to trigger an antivirus scanner to see if one is active"""
140 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
140 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
141 f.write(
141 f.write(
142 util.b85decode(
142 util.b85decode(
143 # This is a base85-armored version of the EICAR test file. See
143 # This is a base85-armored version of the EICAR test file. See
144 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
144 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
145 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
145 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
146 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
146 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
147 )
147 )
148 )
148 )
149 # Give an AV engine time to scan the file.
149 # Give an AV engine time to scan the file.
150 time.sleep(2)
150 time.sleep(2)
151 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
151 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
152
152
153
153
154 @command(b'debugapplystreamclonebundle', [], b'FILE')
154 @command(b'debugapplystreamclonebundle', [], b'FILE')
155 def debugapplystreamclonebundle(ui, repo, fname):
155 def debugapplystreamclonebundle(ui, repo, fname):
156 """apply a stream clone bundle file"""
156 """apply a stream clone bundle file"""
157 f = hg.openpath(ui, fname)
157 f = hg.openpath(ui, fname)
158 gen = exchange.readbundle(ui, f, fname)
158 gen = exchange.readbundle(ui, f, fname)
159 gen.apply(repo)
159 gen.apply(repo)
160
160
161
161
162 @command(
162 @command(
163 b'debugbuilddag',
163 b'debugbuilddag',
164 [
164 [
165 (
165 (
166 b'm',
166 b'm',
167 b'mergeable-file',
167 b'mergeable-file',
168 None,
168 None,
169 _(b'add single file mergeable changes'),
169 _(b'add single file mergeable changes'),
170 ),
170 ),
171 (
171 (
172 b'o',
172 b'o',
173 b'overwritten-file',
173 b'overwritten-file',
174 None,
174 None,
175 _(b'add single file all revs overwrite'),
175 _(b'add single file all revs overwrite'),
176 ),
176 ),
177 (b'n', b'new-file', None, _(b'add new file at each rev')),
177 (b'n', b'new-file', None, _(b'add new file at each rev')),
178 ],
178 ],
179 _(b'[OPTION]... [TEXT]'),
179 _(b'[OPTION]... [TEXT]'),
180 )
180 )
181 def debugbuilddag(
181 def debugbuilddag(
182 ui,
182 ui,
183 repo,
183 repo,
184 text=None,
184 text=None,
185 mergeable_file=False,
185 mergeable_file=False,
186 overwritten_file=False,
186 overwritten_file=False,
187 new_file=False,
187 new_file=False,
188 ):
188 ):
189 """builds a repo with a given DAG from scratch in the current empty repo
189 """builds a repo with a given DAG from scratch in the current empty repo
190
190
191 The description of the DAG is read from stdin if not given on the
191 The description of the DAG is read from stdin if not given on the
192 command line.
192 command line.
193
193
194 Elements:
194 Elements:
195
195
196 - "+n" is a linear run of n nodes based on the current default parent
196 - "+n" is a linear run of n nodes based on the current default parent
197 - "." is a single node based on the current default parent
197 - "." is a single node based on the current default parent
198 - "$" resets the default parent to null (implied at the start);
198 - "$" resets the default parent to null (implied at the start);
199 otherwise the default parent is always the last node created
199 otherwise the default parent is always the last node created
200 - "<p" sets the default parent to the backref p
200 - "<p" sets the default parent to the backref p
201 - "*p" is a fork at parent p, which is a backref
201 - "*p" is a fork at parent p, which is a backref
202 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
202 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
203 - "/p2" is a merge of the preceding node and p2
203 - "/p2" is a merge of the preceding node and p2
204 - ":tag" defines a local tag for the preceding node
204 - ":tag" defines a local tag for the preceding node
205 - "@branch" sets the named branch for subsequent nodes
205 - "@branch" sets the named branch for subsequent nodes
206 - "#...\\n" is a comment up to the end of the line
206 - "#...\\n" is a comment up to the end of the line
207
207
208 Whitespace between the above elements is ignored.
208 Whitespace between the above elements is ignored.
209
209
210 A backref is either
210 A backref is either
211
211
212 - a number n, which references the node curr-n, where curr is the current
212 - a number n, which references the node curr-n, where curr is the current
213 node, or
213 node, or
214 - the name of a local tag you placed earlier using ":tag", or
214 - the name of a local tag you placed earlier using ":tag", or
215 - empty to denote the default parent.
215 - empty to denote the default parent.
216
216
217 All string-valued elements are either strictly alphanumeric, or must
217 All string-valued elements are either strictly alphanumeric, or must
218 be enclosed in double quotes ("..."), with "\\" as escape character.
218 be enclosed in double quotes ("..."), with "\\" as escape character.
219 """
219 """
220
220
221 if text is None:
221 if text is None:
222 ui.status(_(b"reading DAG from stdin\n"))
222 ui.status(_(b"reading DAG from stdin\n"))
223 text = ui.fin.read()
223 text = ui.fin.read()
224
224
225 cl = repo.changelog
225 cl = repo.changelog
226 if len(cl) > 0:
226 if len(cl) > 0:
227 raise error.Abort(_(b'repository is not empty'))
227 raise error.Abort(_(b'repository is not empty'))
228
228
229 # determine number of revs in DAG
229 # determine number of revs in DAG
230 total = 0
230 total = 0
231 for type, data in dagparser.parsedag(text):
231 for type, data in dagparser.parsedag(text):
232 if type == b'n':
232 if type == b'n':
233 total += 1
233 total += 1
234
234
235 if mergeable_file:
235 if mergeable_file:
236 linesperrev = 2
236 linesperrev = 2
237 # make a file with k lines per rev
237 # make a file with k lines per rev
238 initialmergedlines = [
238 initialmergedlines = [
239 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
239 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
240 ]
240 ]
241 initialmergedlines.append(b"")
241 initialmergedlines.append(b"")
242
242
243 tags = []
243 tags = []
244 progress = ui.makeprogress(
244 progress = ui.makeprogress(
245 _(b'building'), unit=_(b'revisions'), total=total
245 _(b'building'), unit=_(b'revisions'), total=total
246 )
246 )
247 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
247 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
248 at = -1
248 at = -1
249 atbranch = b'default'
249 atbranch = b'default'
250 nodeids = []
250 nodeids = []
251 id = 0
251 id = 0
252 progress.update(id)
252 progress.update(id)
253 for type, data in dagparser.parsedag(text):
253 for type, data in dagparser.parsedag(text):
254 if type == b'n':
254 if type == b'n':
255 ui.note((b'node %s\n' % pycompat.bytestr(data)))
255 ui.note((b'node %s\n' % pycompat.bytestr(data)))
256 id, ps = data
256 id, ps = data
257
257
258 files = []
258 files = []
259 filecontent = {}
259 filecontent = {}
260
260
261 p2 = None
261 p2 = None
262 if mergeable_file:
262 if mergeable_file:
263 fn = b"mf"
263 fn = b"mf"
264 p1 = repo[ps[0]]
264 p1 = repo[ps[0]]
265 if len(ps) > 1:
265 if len(ps) > 1:
266 p2 = repo[ps[1]]
266 p2 = repo[ps[1]]
267 pa = p1.ancestor(p2)
267 pa = p1.ancestor(p2)
268 base, local, other = [
268 base, local, other = [
269 x[fn].data() for x in (pa, p1, p2)
269 x[fn].data() for x in (pa, p1, p2)
270 ]
270 ]
271 m3 = simplemerge.Merge3Text(base, local, other)
271 m3 = simplemerge.Merge3Text(base, local, other)
272 ml = [l.strip() for l in m3.merge_lines()]
272 ml = [l.strip() for l in m3.merge_lines()]
273 ml.append(b"")
273 ml.append(b"")
274 elif at > 0:
274 elif at > 0:
275 ml = p1[fn].data().split(b"\n")
275 ml = p1[fn].data().split(b"\n")
276 else:
276 else:
277 ml = initialmergedlines
277 ml = initialmergedlines
278 ml[id * linesperrev] += b" r%i" % id
278 ml[id * linesperrev] += b" r%i" % id
279 mergedtext = b"\n".join(ml)
279 mergedtext = b"\n".join(ml)
280 files.append(fn)
280 files.append(fn)
281 filecontent[fn] = mergedtext
281 filecontent[fn] = mergedtext
282
282
283 if overwritten_file:
283 if overwritten_file:
284 fn = b"of"
284 fn = b"of"
285 files.append(fn)
285 files.append(fn)
286 filecontent[fn] = b"r%i\n" % id
286 filecontent[fn] = b"r%i\n" % id
287
287
288 if new_file:
288 if new_file:
289 fn = b"nf%i" % id
289 fn = b"nf%i" % id
290 files.append(fn)
290 files.append(fn)
291 filecontent[fn] = b"r%i\n" % id
291 filecontent[fn] = b"r%i\n" % id
292 if len(ps) > 1:
292 if len(ps) > 1:
293 if not p2:
293 if not p2:
294 p2 = repo[ps[1]]
294 p2 = repo[ps[1]]
295 for fn in p2:
295 for fn in p2:
296 if fn.startswith(b"nf"):
296 if fn.startswith(b"nf"):
297 files.append(fn)
297 files.append(fn)
298 filecontent[fn] = p2[fn].data()
298 filecontent[fn] = p2[fn].data()
299
299
300 def fctxfn(repo, cx, path):
300 def fctxfn(repo, cx, path):
301 if path in filecontent:
301 if path in filecontent:
302 return context.memfilectx(
302 return context.memfilectx(
303 repo, cx, path, filecontent[path]
303 repo, cx, path, filecontent[path]
304 )
304 )
305 return None
305 return None
306
306
307 if len(ps) == 0 or ps[0] < 0:
307 if len(ps) == 0 or ps[0] < 0:
308 pars = [None, None]
308 pars = [None, None]
309 elif len(ps) == 1:
309 elif len(ps) == 1:
310 pars = [nodeids[ps[0]], None]
310 pars = [nodeids[ps[0]], None]
311 else:
311 else:
312 pars = [nodeids[p] for p in ps]
312 pars = [nodeids[p] for p in ps]
313 cx = context.memctx(
313 cx = context.memctx(
314 repo,
314 repo,
315 pars,
315 pars,
316 b"r%i" % id,
316 b"r%i" % id,
317 files,
317 files,
318 fctxfn,
318 fctxfn,
319 date=(id, 0),
319 date=(id, 0),
320 user=b"debugbuilddag",
320 user=b"debugbuilddag",
321 extra={b'branch': atbranch},
321 extra={b'branch': atbranch},
322 )
322 )
323 nodeid = repo.commitctx(cx)
323 nodeid = repo.commitctx(cx)
324 nodeids.append(nodeid)
324 nodeids.append(nodeid)
325 at = id
325 at = id
326 elif type == b'l':
326 elif type == b'l':
327 id, name = data
327 id, name = data
328 ui.note((b'tag %s\n' % name))
328 ui.note((b'tag %s\n' % name))
329 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
329 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
330 elif type == b'a':
330 elif type == b'a':
331 ui.note((b'branch %s\n' % data))
331 ui.note((b'branch %s\n' % data))
332 atbranch = data
332 atbranch = data
333 progress.update(id)
333 progress.update(id)
334
334
335 if tags:
335 if tags:
336 repo.vfs.write(b"localtags", b"".join(tags))
336 repo.vfs.write(b"localtags", b"".join(tags))
337
337
338
338
339 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
339 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
340 indent_string = b' ' * indent
340 indent_string = b' ' * indent
341 if all:
341 if all:
342 ui.writenoi18n(
342 ui.writenoi18n(
343 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
343 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
344 % indent_string
344 % indent_string
345 )
345 )
346
346
347 def showchunks(named):
347 def showchunks(named):
348 ui.write(b"\n%s%s\n" % (indent_string, named))
348 ui.write(b"\n%s%s\n" % (indent_string, named))
349 for deltadata in gen.deltaiter():
349 for deltadata in gen.deltaiter():
350 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
350 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
351 ui.write(
351 ui.write(
352 b"%s%s %s %s %s %s %d\n"
352 b"%s%s %s %s %s %s %d\n"
353 % (
353 % (
354 indent_string,
354 indent_string,
355 hex(node),
355 hex(node),
356 hex(p1),
356 hex(p1),
357 hex(p2),
357 hex(p2),
358 hex(cs),
358 hex(cs),
359 hex(deltabase),
359 hex(deltabase),
360 len(delta),
360 len(delta),
361 )
361 )
362 )
362 )
363
363
364 gen.changelogheader()
364 gen.changelogheader()
365 showchunks(b"changelog")
365 showchunks(b"changelog")
366 gen.manifestheader()
366 gen.manifestheader()
367 showchunks(b"manifest")
367 showchunks(b"manifest")
368 for chunkdata in iter(gen.filelogheader, {}):
368 for chunkdata in iter(gen.filelogheader, {}):
369 fname = chunkdata[b'filename']
369 fname = chunkdata[b'filename']
370 showchunks(fname)
370 showchunks(fname)
371 else:
371 else:
372 if isinstance(gen, bundle2.unbundle20):
372 if isinstance(gen, bundle2.unbundle20):
373 raise error.Abort(_(b'use debugbundle2 for this file'))
373 raise error.Abort(_(b'use debugbundle2 for this file'))
374 gen.changelogheader()
374 gen.changelogheader()
375 for deltadata in gen.deltaiter():
375 for deltadata in gen.deltaiter():
376 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
376 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
377 ui.write(b"%s%s\n" % (indent_string, hex(node)))
377 ui.write(b"%s%s\n" % (indent_string, hex(node)))
378
378
379
379
380 def _debugobsmarkers(ui, part, indent=0, **opts):
380 def _debugobsmarkers(ui, part, indent=0, **opts):
381 """display version and markers contained in 'data'"""
381 """display version and markers contained in 'data'"""
382 opts = pycompat.byteskwargs(opts)
382 opts = pycompat.byteskwargs(opts)
383 data = part.read()
383 data = part.read()
384 indent_string = b' ' * indent
384 indent_string = b' ' * indent
385 try:
385 try:
386 version, markers = obsolete._readmarkers(data)
386 version, markers = obsolete._readmarkers(data)
387 except error.UnknownVersion as exc:
387 except error.UnknownVersion as exc:
388 msg = b"%sunsupported version: %s (%d bytes)\n"
388 msg = b"%sunsupported version: %s (%d bytes)\n"
389 msg %= indent_string, exc.version, len(data)
389 msg %= indent_string, exc.version, len(data)
390 ui.write(msg)
390 ui.write(msg)
391 else:
391 else:
392 msg = b"%sversion: %d (%d bytes)\n"
392 msg = b"%sversion: %d (%d bytes)\n"
393 msg %= indent_string, version, len(data)
393 msg %= indent_string, version, len(data)
394 ui.write(msg)
394 ui.write(msg)
395 fm = ui.formatter(b'debugobsolete', opts)
395 fm = ui.formatter(b'debugobsolete', opts)
396 for rawmarker in sorted(markers):
396 for rawmarker in sorted(markers):
397 m = obsutil.marker(None, rawmarker)
397 m = obsutil.marker(None, rawmarker)
398 fm.startitem()
398 fm.startitem()
399 fm.plain(indent_string)
399 fm.plain(indent_string)
400 cmdutil.showmarker(fm, m)
400 cmdutil.showmarker(fm, m)
401 fm.end()
401 fm.end()
402
402
403
403
404 def _debugphaseheads(ui, data, indent=0):
404 def _debugphaseheads(ui, data, indent=0):
405 """display version and markers contained in 'data'"""
405 """display version and markers contained in 'data'"""
406 indent_string = b' ' * indent
406 indent_string = b' ' * indent
407 headsbyphase = phases.binarydecode(data)
407 headsbyphase = phases.binarydecode(data)
408 for phase in phases.allphases:
408 for phase in phases.allphases:
409 for head in headsbyphase[phase]:
409 for head in headsbyphase[phase]:
410 ui.write(indent_string)
410 ui.write(indent_string)
411 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
411 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
412
412
413
413
414 def _quasirepr(thing):
414 def _quasirepr(thing):
415 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
415 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
416 return b'{%s}' % (
416 return b'{%s}' % (
417 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
417 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
418 )
418 )
419 return pycompat.bytestr(repr(thing))
419 return pycompat.bytestr(repr(thing))
420
420
421
421
422 def _debugbundle2(ui, gen, all=None, **opts):
422 def _debugbundle2(ui, gen, all=None, **opts):
423 """lists the contents of a bundle2"""
423 """lists the contents of a bundle2"""
424 if not isinstance(gen, bundle2.unbundle20):
424 if not isinstance(gen, bundle2.unbundle20):
425 raise error.Abort(_(b'not a bundle2 file'))
425 raise error.Abort(_(b'not a bundle2 file'))
426 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
426 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
427 parttypes = opts.get('part_type', [])
427 parttypes = opts.get('part_type', [])
428 for part in gen.iterparts():
428 for part in gen.iterparts():
429 if parttypes and part.type not in parttypes:
429 if parttypes and part.type not in parttypes:
430 continue
430 continue
431 msg = b'%s -- %s (mandatory: %r)\n'
431 msg = b'%s -- %s (mandatory: %r)\n'
432 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
432 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
433 if part.type == b'changegroup':
433 if part.type == b'changegroup':
434 version = part.params.get(b'version', b'01')
434 version = part.params.get(b'version', b'01')
435 cg = changegroup.getunbundler(version, part, b'UN')
435 cg = changegroup.getunbundler(version, part, b'UN')
436 if not ui.quiet:
436 if not ui.quiet:
437 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
437 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
438 if part.type == b'obsmarkers':
438 if part.type == b'obsmarkers':
439 if not ui.quiet:
439 if not ui.quiet:
440 _debugobsmarkers(ui, part, indent=4, **opts)
440 _debugobsmarkers(ui, part, indent=4, **opts)
441 if part.type == b'phase-heads':
441 if part.type == b'phase-heads':
442 if not ui.quiet:
442 if not ui.quiet:
443 _debugphaseheads(ui, part, indent=4)
443 _debugphaseheads(ui, part, indent=4)
444
444
445
445
446 @command(
446 @command(
447 b'debugbundle',
447 b'debugbundle',
448 [
448 [
449 (b'a', b'all', None, _(b'show all details')),
449 (b'a', b'all', None, _(b'show all details')),
450 (b'', b'part-type', [], _(b'show only the named part type')),
450 (b'', b'part-type', [], _(b'show only the named part type')),
451 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
451 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
452 ],
452 ],
453 _(b'FILE'),
453 _(b'FILE'),
454 norepo=True,
454 norepo=True,
455 )
455 )
456 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
456 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
457 """lists the contents of a bundle"""
457 """lists the contents of a bundle"""
458 with hg.openpath(ui, bundlepath) as f:
458 with hg.openpath(ui, bundlepath) as f:
459 if spec:
459 if spec:
460 spec = exchange.getbundlespec(ui, f)
460 spec = exchange.getbundlespec(ui, f)
461 ui.write(b'%s\n' % spec)
461 ui.write(b'%s\n' % spec)
462 return
462 return
463
463
464 gen = exchange.readbundle(ui, f, bundlepath)
464 gen = exchange.readbundle(ui, f, bundlepath)
465 if isinstance(gen, bundle2.unbundle20):
465 if isinstance(gen, bundle2.unbundle20):
466 return _debugbundle2(ui, gen, all=all, **opts)
466 return _debugbundle2(ui, gen, all=all, **opts)
467 _debugchangegroup(ui, gen, all=all, **opts)
467 _debugchangegroup(ui, gen, all=all, **opts)
468
468
469
469
470 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
470 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
471 def debugcapabilities(ui, path, **opts):
471 def debugcapabilities(ui, path, **opts):
472 """lists the capabilities of a remote peer"""
472 """lists the capabilities of a remote peer"""
473 opts = pycompat.byteskwargs(opts)
473 opts = pycompat.byteskwargs(opts)
474 peer = hg.peer(ui, opts, path)
474 peer = hg.peer(ui, opts, path)
475 try:
475 try:
476 caps = peer.capabilities()
476 caps = peer.capabilities()
477 ui.writenoi18n(b'Main capabilities:\n')
477 ui.writenoi18n(b'Main capabilities:\n')
478 for c in sorted(caps):
478 for c in sorted(caps):
479 ui.write(b' %s\n' % c)
479 ui.write(b' %s\n' % c)
480 b2caps = bundle2.bundle2caps(peer)
480 b2caps = bundle2.bundle2caps(peer)
481 if b2caps:
481 if b2caps:
482 ui.writenoi18n(b'Bundle2 capabilities:\n')
482 ui.writenoi18n(b'Bundle2 capabilities:\n')
483 for key, values in sorted(pycompat.iteritems(b2caps)):
483 for key, values in sorted(pycompat.iteritems(b2caps)):
484 ui.write(b' %s\n' % key)
484 ui.write(b' %s\n' % key)
485 for v in values:
485 for v in values:
486 ui.write(b' %s\n' % v)
486 ui.write(b' %s\n' % v)
487 finally:
487 finally:
488 peer.close()
488 peer.close()
489
489
490
490
491 @command(
491 @command(
492 b'debugchangedfiles',
492 b'debugchangedfiles',
493 [
493 [
494 (
494 (
495 b'',
495 b'',
496 b'compute',
496 b'compute',
497 False,
497 False,
498 b"compute information instead of reading it from storage",
498 b"compute information instead of reading it from storage",
499 ),
499 ),
500 ],
500 ],
501 b'REV',
501 b'REV',
502 )
502 )
503 def debugchangedfiles(ui, repo, rev, **opts):
503 def debugchangedfiles(ui, repo, rev, **opts):
504 """list the stored files changes for a revision"""
504 """list the stored files changes for a revision"""
505 ctx = scmutil.revsingle(repo, rev, None)
505 ctx = scmutil.revsingle(repo, rev, None)
506 files = None
506 files = None
507
507
508 if opts['compute']:
508 if opts['compute']:
509 files = metadata.compute_all_files_changes(ctx)
509 files = metadata.compute_all_files_changes(ctx)
510 else:
510 else:
511 sd = repo.changelog.sidedata(ctx.rev())
511 sd = repo.changelog.sidedata(ctx.rev())
512 files_block = sd.get(sidedata.SD_FILES)
512 files_block = sd.get(sidedata.SD_FILES)
513 if files_block is not None:
513 if files_block is not None:
514 files = metadata.decode_files_sidedata(sd)
514 files = metadata.decode_files_sidedata(sd)
515 if files is not None:
515 if files is not None:
516 for f in sorted(files.touched):
516 for f in sorted(files.touched):
517 if f in files.added:
517 if f in files.added:
518 action = b"added"
518 action = b"added"
519 elif f in files.removed:
519 elif f in files.removed:
520 action = b"removed"
520 action = b"removed"
521 elif f in files.merged:
521 elif f in files.merged:
522 action = b"merged"
522 action = b"merged"
523 elif f in files.salvaged:
523 elif f in files.salvaged:
524 action = b"salvaged"
524 action = b"salvaged"
525 else:
525 else:
526 action = b"touched"
526 action = b"touched"
527
527
528 copy_parent = b""
528 copy_parent = b""
529 copy_source = b""
529 copy_source = b""
530 if f in files.copied_from_p1:
530 if f in files.copied_from_p1:
531 copy_parent = b"p1"
531 copy_parent = b"p1"
532 copy_source = files.copied_from_p1[f]
532 copy_source = files.copied_from_p1[f]
533 elif f in files.copied_from_p2:
533 elif f in files.copied_from_p2:
534 copy_parent = b"p2"
534 copy_parent = b"p2"
535 copy_source = files.copied_from_p2[f]
535 copy_source = files.copied_from_p2[f]
536
536
537 data = (action, copy_parent, f, copy_source)
537 data = (action, copy_parent, f, copy_source)
538 template = b"%-8s %2s: %s, %s;\n"
538 template = b"%-8s %2s: %s, %s;\n"
539 ui.write(template % data)
539 ui.write(template % data)
540
540
541
541
542 @command(b'debugcheckstate', [], b'')
542 @command(b'debugcheckstate', [], b'')
543 def debugcheckstate(ui, repo):
543 def debugcheckstate(ui, repo):
544 """validate the correctness of the current dirstate"""
544 """validate the correctness of the current dirstate"""
545 parent1, parent2 = repo.dirstate.parents()
545 parent1, parent2 = repo.dirstate.parents()
546 m1 = repo[parent1].manifest()
546 m1 = repo[parent1].manifest()
547 m2 = repo[parent2].manifest()
547 m2 = repo[parent2].manifest()
548 errors = 0
548 errors = 0
549 for f in repo.dirstate:
549 for f in repo.dirstate:
550 state = repo.dirstate[f]
550 state = repo.dirstate[f]
551 if state in b"nr" and f not in m1:
551 if state in b"nr" and f not in m1:
552 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
552 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
553 errors += 1
553 errors += 1
554 if state in b"a" and f in m1:
554 if state in b"a" and f in m1:
555 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
555 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
556 errors += 1
556 errors += 1
557 if state in b"m" and f not in m1 and f not in m2:
557 if state in b"m" and f not in m1 and f not in m2:
558 ui.warn(
558 ui.warn(
559 _(b"%s in state %s, but not in either manifest\n") % (f, state)
559 _(b"%s in state %s, but not in either manifest\n") % (f, state)
560 )
560 )
561 errors += 1
561 errors += 1
562 for f in m1:
562 for f in m1:
563 state = repo.dirstate[f]
563 state = repo.dirstate[f]
564 if state not in b"nrm":
564 if state not in b"nrm":
565 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
565 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
566 errors += 1
566 errors += 1
567 if errors:
567 if errors:
568 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
568 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
569 raise error.Abort(errstr)
569 raise error.Abort(errstr)
570
570
571
571
572 @command(
572 @command(
573 b'debugcolor',
573 b'debugcolor',
574 [(b'', b'style', None, _(b'show all configured styles'))],
574 [(b'', b'style', None, _(b'show all configured styles'))],
575 b'hg debugcolor',
575 b'hg debugcolor',
576 )
576 )
577 def debugcolor(ui, repo, **opts):
577 def debugcolor(ui, repo, **opts):
578 """show available color, effects or style"""
578 """show available color, effects or style"""
579 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
579 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
580 if opts.get('style'):
580 if opts.get('style'):
581 return _debugdisplaystyle(ui)
581 return _debugdisplaystyle(ui)
582 else:
582 else:
583 return _debugdisplaycolor(ui)
583 return _debugdisplaycolor(ui)
584
584
585
585
586 def _debugdisplaycolor(ui):
586 def _debugdisplaycolor(ui):
587 ui = ui.copy()
587 ui = ui.copy()
588 ui._styles.clear()
588 ui._styles.clear()
589 for effect in color._activeeffects(ui).keys():
589 for effect in color._activeeffects(ui).keys():
590 ui._styles[effect] = effect
590 ui._styles[effect] = effect
591 if ui._terminfoparams:
591 if ui._terminfoparams:
592 for k, v in ui.configitems(b'color'):
592 for k, v in ui.configitems(b'color'):
593 if k.startswith(b'color.'):
593 if k.startswith(b'color.'):
594 ui._styles[k] = k[6:]
594 ui._styles[k] = k[6:]
595 elif k.startswith(b'terminfo.'):
595 elif k.startswith(b'terminfo.'):
596 ui._styles[k] = k[9:]
596 ui._styles[k] = k[9:]
597 ui.write(_(b'available colors:\n'))
597 ui.write(_(b'available colors:\n'))
598 # sort labels with a '_' after the others to group the '_background' entries.
598 # sort labels with a '_' after the others to group the '_background' entries.
599 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
599 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
600 for colorname, label in items:
600 for colorname, label in items:
601 ui.write(b'%s\n' % colorname, label=label)
601 ui.write(b'%s\n' % colorname, label=label)
602
602
603
603
604 def _debugdisplaystyle(ui):
604 def _debugdisplaystyle(ui):
605 ui.write(_(b'available style:\n'))
605 ui.write(_(b'available style:\n'))
606 if not ui._styles:
606 if not ui._styles:
607 return
607 return
608 width = max(len(s) for s in ui._styles)
608 width = max(len(s) for s in ui._styles)
609 for label, effects in sorted(ui._styles.items()):
609 for label, effects in sorted(ui._styles.items()):
610 ui.write(b'%s' % label, label=label)
610 ui.write(b'%s' % label, label=label)
611 if effects:
611 if effects:
612 # 50
612 # 50
613 ui.write(b': ')
613 ui.write(b': ')
614 ui.write(b' ' * (max(0, width - len(label))))
614 ui.write(b' ' * (max(0, width - len(label))))
615 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
615 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
616 ui.write(b'\n')
616 ui.write(b'\n')
617
617
618
618
619 @command(b'debugcreatestreamclonebundle', [], b'FILE')
619 @command(b'debugcreatestreamclonebundle', [], b'FILE')
620 def debugcreatestreamclonebundle(ui, repo, fname):
620 def debugcreatestreamclonebundle(ui, repo, fname):
621 """create a stream clone bundle file
621 """create a stream clone bundle file
622
622
623 Stream bundles are special bundles that are essentially archives of
623 Stream bundles are special bundles that are essentially archives of
624 revlog files. They are commonly used for cloning very quickly.
624 revlog files. They are commonly used for cloning very quickly.
625 """
625 """
626 # TODO we may want to turn this into an abort when this functionality
626 # TODO we may want to turn this into an abort when this functionality
627 # is moved into `hg bundle`.
627 # is moved into `hg bundle`.
628 if phases.hassecret(repo):
628 if phases.hassecret(repo):
629 ui.warn(
629 ui.warn(
630 _(
630 _(
631 b'(warning: stream clone bundle will contain secret '
631 b'(warning: stream clone bundle will contain secret '
632 b'revisions)\n'
632 b'revisions)\n'
633 )
633 )
634 )
634 )
635
635
636 requirements, gen = streamclone.generatebundlev1(repo)
636 requirements, gen = streamclone.generatebundlev1(repo)
637 changegroup.writechunks(ui, gen, fname)
637 changegroup.writechunks(ui, gen, fname)
638
638
639 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640
640
641
641
642 @command(
642 @command(
643 b'debugdag',
643 b'debugdag',
644 [
644 [
645 (b't', b'tags', None, _(b'use tags as labels')),
645 (b't', b'tags', None, _(b'use tags as labels')),
646 (b'b', b'branches', None, _(b'annotate with branch names')),
646 (b'b', b'branches', None, _(b'annotate with branch names')),
647 (b'', b'dots', None, _(b'use dots for runs')),
647 (b'', b'dots', None, _(b'use dots for runs')),
648 (b's', b'spaces', None, _(b'separate elements by spaces')),
648 (b's', b'spaces', None, _(b'separate elements by spaces')),
649 ],
649 ],
650 _(b'[OPTION]... [FILE [REV]...]'),
650 _(b'[OPTION]... [FILE [REV]...]'),
651 optionalrepo=True,
651 optionalrepo=True,
652 )
652 )
653 def debugdag(ui, repo, file_=None, *revs, **opts):
653 def debugdag(ui, repo, file_=None, *revs, **opts):
654 """format the changelog or an index DAG as a concise textual description
654 """format the changelog or an index DAG as a concise textual description
655
655
656 If you pass a revlog index, the revlog's DAG is emitted. If you list
656 If you pass a revlog index, the revlog's DAG is emitted. If you list
657 revision numbers, they get labeled in the output as rN.
657 revision numbers, they get labeled in the output as rN.
658
658
659 Otherwise, the changelog DAG of the current repo is emitted.
659 Otherwise, the changelog DAG of the current repo is emitted.
660 """
660 """
661 spaces = opts.get('spaces')
661 spaces = opts.get('spaces')
662 dots = opts.get('dots')
662 dots = opts.get('dots')
663 if file_:
663 if file_:
664 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
664 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
665 revs = {int(r) for r in revs}
665 revs = {int(r) for r in revs}
666
666
667 def events():
667 def events():
668 for r in rlog:
668 for r in rlog:
669 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
669 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
670 if r in revs:
670 if r in revs:
671 yield b'l', (r, b"r%i" % r)
671 yield b'l', (r, b"r%i" % r)
672
672
673 elif repo:
673 elif repo:
674 cl = repo.changelog
674 cl = repo.changelog
675 tags = opts.get('tags')
675 tags = opts.get('tags')
676 branches = opts.get('branches')
676 branches = opts.get('branches')
677 if tags:
677 if tags:
678 labels = {}
678 labels = {}
679 for l, n in repo.tags().items():
679 for l, n in repo.tags().items():
680 labels.setdefault(cl.rev(n), []).append(l)
680 labels.setdefault(cl.rev(n), []).append(l)
681
681
682 def events():
682 def events():
683 b = b"default"
683 b = b"default"
684 for r in cl:
684 for r in cl:
685 if branches:
685 if branches:
686 newb = cl.read(cl.node(r))[5][b'branch']
686 newb = cl.read(cl.node(r))[5][b'branch']
687 if newb != b:
687 if newb != b:
688 yield b'a', newb
688 yield b'a', newb
689 b = newb
689 b = newb
690 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
690 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
691 if tags:
691 if tags:
692 ls = labels.get(r)
692 ls = labels.get(r)
693 if ls:
693 if ls:
694 for l in ls:
694 for l in ls:
695 yield b'l', (r, l)
695 yield b'l', (r, l)
696
696
697 else:
697 else:
698 raise error.Abort(_(b'need repo for changelog dag'))
698 raise error.Abort(_(b'need repo for changelog dag'))
699
699
700 for line in dagparser.dagtextlines(
700 for line in dagparser.dagtextlines(
701 events(),
701 events(),
702 addspaces=spaces,
702 addspaces=spaces,
703 wraplabels=True,
703 wraplabels=True,
704 wrapannotations=True,
704 wrapannotations=True,
705 wrapnonlinear=dots,
705 wrapnonlinear=dots,
706 usedots=dots,
706 usedots=dots,
707 maxlinewidth=70,
707 maxlinewidth=70,
708 ):
708 ):
709 ui.write(line)
709 ui.write(line)
710 ui.write(b"\n")
710 ui.write(b"\n")
711
711
712
712
713 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
713 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
714 def debugdata(ui, repo, file_, rev=None, **opts):
714 def debugdata(ui, repo, file_, rev=None, **opts):
715 """dump the contents of a data file revision"""
715 """dump the contents of a data file revision"""
716 opts = pycompat.byteskwargs(opts)
716 opts = pycompat.byteskwargs(opts)
717 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
717 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
718 if rev is not None:
718 if rev is not None:
719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 file_, rev = None, file_
720 file_, rev = None, file_
721 elif rev is None:
721 elif rev is None:
722 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
722 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
723 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
723 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
724 try:
724 try:
725 ui.write(r.rawdata(r.lookup(rev)))
725 ui.write(r.rawdata(r.lookup(rev)))
726 except KeyError:
726 except KeyError:
727 raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 raise error.Abort(_(b'invalid revision identifier %s') % rev)
728
728
729
729
730 @command(
730 @command(
731 b'debugdate',
731 b'debugdate',
732 [(b'e', b'extended', None, _(b'try extended date formats'))],
732 [(b'e', b'extended', None, _(b'try extended date formats'))],
733 _(b'[-e] DATE [RANGE]'),
733 _(b'[-e] DATE [RANGE]'),
734 norepo=True,
734 norepo=True,
735 optionalrepo=True,
735 optionalrepo=True,
736 )
736 )
737 def debugdate(ui, date, range=None, **opts):
737 def debugdate(ui, date, range=None, **opts):
738 """parse and display a date"""
738 """parse and display a date"""
739 if opts["extended"]:
739 if opts["extended"]:
740 d = dateutil.parsedate(date, dateutil.extendeddateformats)
740 d = dateutil.parsedate(date, dateutil.extendeddateformats)
741 else:
741 else:
742 d = dateutil.parsedate(date)
742 d = dateutil.parsedate(date)
743 ui.writenoi18n(b"internal: %d %d\n" % d)
743 ui.writenoi18n(b"internal: %d %d\n" % d)
744 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
744 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
745 if range:
745 if range:
746 m = dateutil.matchdate(range)
746 m = dateutil.matchdate(range)
747 ui.writenoi18n(b"match: %s\n" % m(d[0]))
747 ui.writenoi18n(b"match: %s\n" % m(d[0]))
748
748
749
749
750 @command(
750 @command(
751 b'debugdeltachain',
751 b'debugdeltachain',
752 cmdutil.debugrevlogopts + cmdutil.formatteropts,
752 cmdutil.debugrevlogopts + cmdutil.formatteropts,
753 _(b'-c|-m|FILE'),
753 _(b'-c|-m|FILE'),
754 optionalrepo=True,
754 optionalrepo=True,
755 )
755 )
756 def debugdeltachain(ui, repo, file_=None, **opts):
756 def debugdeltachain(ui, repo, file_=None, **opts):
757 """dump information about delta chains in a revlog
757 """dump information about delta chains in a revlog
758
758
759 Output can be templatized. Available template keywords are:
759 Output can be templatized. Available template keywords are:
760
760
761 :``rev``: revision number
761 :``rev``: revision number
762 :``chainid``: delta chain identifier (numbered by unique base)
762 :``chainid``: delta chain identifier (numbered by unique base)
763 :``chainlen``: delta chain length to this revision
763 :``chainlen``: delta chain length to this revision
764 :``prevrev``: previous revision in delta chain
764 :``prevrev``: previous revision in delta chain
765 :``deltatype``: role of delta / how it was computed
765 :``deltatype``: role of delta / how it was computed
766 :``compsize``: compressed size of revision
766 :``compsize``: compressed size of revision
767 :``uncompsize``: uncompressed size of revision
767 :``uncompsize``: uncompressed size of revision
768 :``chainsize``: total size of compressed revisions in chain
768 :``chainsize``: total size of compressed revisions in chain
769 :``chainratio``: total chain size divided by uncompressed revision size
769 :``chainratio``: total chain size divided by uncompressed revision size
770 (new delta chains typically start at ratio 2.00)
770 (new delta chains typically start at ratio 2.00)
771 :``lindist``: linear distance from base revision in delta chain to end
771 :``lindist``: linear distance from base revision in delta chain to end
772 of this revision
772 of this revision
773 :``extradist``: total size of revisions not part of this delta chain from
773 :``extradist``: total size of revisions not part of this delta chain from
774 base of delta chain to end of this revision; a measurement
774 base of delta chain to end of this revision; a measurement
775 of how much extra data we need to read/seek across to read
775 of how much extra data we need to read/seek across to read
776 the delta chain for this revision
776 the delta chain for this revision
777 :``extraratio``: extradist divided by chainsize; another representation of
777 :``extraratio``: extradist divided by chainsize; another representation of
778 how much unrelated data is needed to load this delta chain
778 how much unrelated data is needed to load this delta chain
779
779
780 If the repository is configured to use the sparse read, additional keywords
780 If the repository is configured to use the sparse read, additional keywords
781 are available:
781 are available:
782
782
783 :``readsize``: total size of data read from the disk for a revision
783 :``readsize``: total size of data read from the disk for a revision
784 (sum of the sizes of all the blocks)
784 (sum of the sizes of all the blocks)
785 :``largestblock``: size of the largest block of data read from the disk
785 :``largestblock``: size of the largest block of data read from the disk
786 :``readdensity``: density of useful bytes in the data read from the disk
786 :``readdensity``: density of useful bytes in the data read from the disk
787 :``srchunks``: in how many data hunks the whole revision would be read
787 :``srchunks``: in how many data hunks the whole revision would be read
788
788
789 The sparse read can be enabled with experimental.sparse-read = True
789 The sparse read can be enabled with experimental.sparse-read = True
790 """
790 """
791 opts = pycompat.byteskwargs(opts)
791 opts = pycompat.byteskwargs(opts)
792 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
792 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
793 index = r.index
793 index = r.index
794 start = r.start
794 start = r.start
795 length = r.length
795 length = r.length
796 generaldelta = r.version & revlog.FLAG_GENERALDELTA
796 generaldelta = r.version & revlog.FLAG_GENERALDELTA
797 withsparseread = getattr(r, '_withsparseread', False)
797 withsparseread = getattr(r, '_withsparseread', False)
798
798
799 def revinfo(rev):
799 def revinfo(rev):
800 e = index[rev]
800 e = index[rev]
801 compsize = e[1]
801 compsize = e[1]
802 uncompsize = e[2]
802 uncompsize = e[2]
803 chainsize = 0
803 chainsize = 0
804
804
805 if generaldelta:
805 if generaldelta:
806 if e[3] == e[5]:
806 if e[3] == e[5]:
807 deltatype = b'p1'
807 deltatype = b'p1'
808 elif e[3] == e[6]:
808 elif e[3] == e[6]:
809 deltatype = b'p2'
809 deltatype = b'p2'
810 elif e[3] == rev - 1:
810 elif e[3] == rev - 1:
811 deltatype = b'prev'
811 deltatype = b'prev'
812 elif e[3] == rev:
812 elif e[3] == rev:
813 deltatype = b'base'
813 deltatype = b'base'
814 else:
814 else:
815 deltatype = b'other'
815 deltatype = b'other'
816 else:
816 else:
817 if e[3] == rev:
817 if e[3] == rev:
818 deltatype = b'base'
818 deltatype = b'base'
819 else:
819 else:
820 deltatype = b'prev'
820 deltatype = b'prev'
821
821
822 chain = r._deltachain(rev)[0]
822 chain = r._deltachain(rev)[0]
823 for iterrev in chain:
823 for iterrev in chain:
824 e = index[iterrev]
824 e = index[iterrev]
825 chainsize += e[1]
825 chainsize += e[1]
826
826
827 return compsize, uncompsize, deltatype, chain, chainsize
827 return compsize, uncompsize, deltatype, chain, chainsize
828
828
829 fm = ui.formatter(b'debugdeltachain', opts)
829 fm = ui.formatter(b'debugdeltachain', opts)
830
830
831 fm.plain(
831 fm.plain(
832 b' rev chain# chainlen prev delta '
832 b' rev chain# chainlen prev delta '
833 b'size rawsize chainsize ratio lindist extradist '
833 b'size rawsize chainsize ratio lindist extradist '
834 b'extraratio'
834 b'extraratio'
835 )
835 )
836 if withsparseread:
836 if withsparseread:
837 fm.plain(b' readsize largestblk rddensity srchunks')
837 fm.plain(b' readsize largestblk rddensity srchunks')
838 fm.plain(b'\n')
838 fm.plain(b'\n')
839
839
840 chainbases = {}
840 chainbases = {}
841 for rev in r:
841 for rev in r:
842 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
842 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
843 chainbase = chain[0]
843 chainbase = chain[0]
844 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
844 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
845 basestart = start(chainbase)
845 basestart = start(chainbase)
846 revstart = start(rev)
846 revstart = start(rev)
847 lineardist = revstart + comp - basestart
847 lineardist = revstart + comp - basestart
848 extradist = lineardist - chainsize
848 extradist = lineardist - chainsize
849 try:
849 try:
850 prevrev = chain[-2]
850 prevrev = chain[-2]
851 except IndexError:
851 except IndexError:
852 prevrev = -1
852 prevrev = -1
853
853
854 if uncomp != 0:
854 if uncomp != 0:
855 chainratio = float(chainsize) / float(uncomp)
855 chainratio = float(chainsize) / float(uncomp)
856 else:
856 else:
857 chainratio = chainsize
857 chainratio = chainsize
858
858
859 if chainsize != 0:
859 if chainsize != 0:
860 extraratio = float(extradist) / float(chainsize)
860 extraratio = float(extradist) / float(chainsize)
861 else:
861 else:
862 extraratio = extradist
862 extraratio = extradist
863
863
864 fm.startitem()
864 fm.startitem()
865 fm.write(
865 fm.write(
866 b'rev chainid chainlen prevrev deltatype compsize '
866 b'rev chainid chainlen prevrev deltatype compsize '
867 b'uncompsize chainsize chainratio lindist extradist '
867 b'uncompsize chainsize chainratio lindist extradist '
868 b'extraratio',
868 b'extraratio',
869 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
869 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
870 rev,
870 rev,
871 chainid,
871 chainid,
872 len(chain),
872 len(chain),
873 prevrev,
873 prevrev,
874 deltatype,
874 deltatype,
875 comp,
875 comp,
876 uncomp,
876 uncomp,
877 chainsize,
877 chainsize,
878 chainratio,
878 chainratio,
879 lineardist,
879 lineardist,
880 extradist,
880 extradist,
881 extraratio,
881 extraratio,
882 rev=rev,
882 rev=rev,
883 chainid=chainid,
883 chainid=chainid,
884 chainlen=len(chain),
884 chainlen=len(chain),
885 prevrev=prevrev,
885 prevrev=prevrev,
886 deltatype=deltatype,
886 deltatype=deltatype,
887 compsize=comp,
887 compsize=comp,
888 uncompsize=uncomp,
888 uncompsize=uncomp,
889 chainsize=chainsize,
889 chainsize=chainsize,
890 chainratio=chainratio,
890 chainratio=chainratio,
891 lindist=lineardist,
891 lindist=lineardist,
892 extradist=extradist,
892 extradist=extradist,
893 extraratio=extraratio,
893 extraratio=extraratio,
894 )
894 )
895 if withsparseread:
895 if withsparseread:
896 readsize = 0
896 readsize = 0
897 largestblock = 0
897 largestblock = 0
898 srchunks = 0
898 srchunks = 0
899
899
900 for revschunk in deltautil.slicechunk(r, chain):
900 for revschunk in deltautil.slicechunk(r, chain):
901 srchunks += 1
901 srchunks += 1
902 blkend = start(revschunk[-1]) + length(revschunk[-1])
902 blkend = start(revschunk[-1]) + length(revschunk[-1])
903 blksize = blkend - start(revschunk[0])
903 blksize = blkend - start(revschunk[0])
904
904
905 readsize += blksize
905 readsize += blksize
906 if largestblock < blksize:
906 if largestblock < blksize:
907 largestblock = blksize
907 largestblock = blksize
908
908
909 if readsize:
909 if readsize:
910 readdensity = float(chainsize) / float(readsize)
910 readdensity = float(chainsize) / float(readsize)
911 else:
911 else:
912 readdensity = 1
912 readdensity = 1
913
913
914 fm.write(
914 fm.write(
915 b'readsize largestblock readdensity srchunks',
915 b'readsize largestblock readdensity srchunks',
916 b' %10d %10d %9.5f %8d',
916 b' %10d %10d %9.5f %8d',
917 readsize,
917 readsize,
918 largestblock,
918 largestblock,
919 readdensity,
919 readdensity,
920 srchunks,
920 srchunks,
921 readsize=readsize,
921 readsize=readsize,
922 largestblock=largestblock,
922 largestblock=largestblock,
923 readdensity=readdensity,
923 readdensity=readdensity,
924 srchunks=srchunks,
924 srchunks=srchunks,
925 )
925 )
926
926
927 fm.plain(b'\n')
927 fm.plain(b'\n')
928
928
929 fm.end()
929 fm.end()
930
930
931
931
932 @command(
932 @command(
933 b'debugdirstate|debugstate',
933 b'debugdirstate|debugstate',
934 [
934 [
935 (
935 (
936 b'',
936 b'',
937 b'nodates',
937 b'nodates',
938 None,
938 None,
939 _(b'do not display the saved mtime (DEPRECATED)'),
939 _(b'do not display the saved mtime (DEPRECATED)'),
940 ),
940 ),
941 (b'', b'dates', True, _(b'display the saved mtime')),
941 (b'', b'dates', True, _(b'display the saved mtime')),
942 (b'', b'datesort', None, _(b'sort by saved mtime')),
942 (b'', b'datesort', None, _(b'sort by saved mtime')),
943 ],
943 ],
944 _(b'[OPTION]...'),
944 _(b'[OPTION]...'),
945 )
945 )
946 def debugstate(ui, repo, **opts):
946 def debugstate(ui, repo, **opts):
947 """show the contents of the current dirstate"""
947 """show the contents of the current dirstate"""
948
948
949 nodates = not opts['dates']
949 nodates = not opts['dates']
950 if opts.get('nodates') is not None:
950 if opts.get('nodates') is not None:
951 nodates = True
951 nodates = True
952 datesort = opts.get('datesort')
952 datesort = opts.get('datesort')
953
953
954 if datesort:
954 if datesort:
955 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
955 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
956 else:
956 else:
957 keyfunc = None # sort by filename
957 keyfunc = None # sort by filename
958 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
958 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
959 if ent[3] == -1:
959 if ent[3] == -1:
960 timestr = b'unset '
960 timestr = b'unset '
961 elif nodates:
961 elif nodates:
962 timestr = b'set '
962 timestr = b'set '
963 else:
963 else:
964 timestr = time.strftime(
964 timestr = time.strftime(
965 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
965 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
966 )
966 )
967 timestr = encoding.strtolocal(timestr)
967 timestr = encoding.strtolocal(timestr)
968 if ent[1] & 0o20000:
968 if ent[1] & 0o20000:
969 mode = b'lnk'
969 mode = b'lnk'
970 else:
970 else:
971 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
971 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
972 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
972 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
973 for f in repo.dirstate.copies():
973 for f in repo.dirstate.copies():
974 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
974 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975
975
976
976
977 @command(
977 @command(
978 b'debugdiscovery',
978 b'debugdiscovery',
979 [
979 [
980 (b'', b'old', None, _(b'use old-style discovery')),
980 (b'', b'old', None, _(b'use old-style discovery')),
981 (
981 (
982 b'',
982 b'',
983 b'nonheads',
983 b'nonheads',
984 None,
984 None,
985 _(b'use old-style discovery with non-heads included'),
985 _(b'use old-style discovery with non-heads included'),
986 ),
986 ),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
988 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
989 (
989 (
990 b'',
990 b'',
991 b'local-as-revs',
991 b'local-as-revs',
992 b"",
992 b"",
993 b'treat local as having these revisions only',
993 b'treat local as having these revisions only',
994 ),
994 ),
995 (
995 (
996 b'',
996 b'',
997 b'remote-as-revs',
997 b'remote-as-revs',
998 b"",
998 b"",
999 b'use local as remote, with only these revisions',
999 b'use local as remote, with only these revisions',
1000 ),
1000 ),
1001 ]
1001 ]
1002 + cmdutil.remoteopts
1002 + cmdutil.remoteopts
1003 + cmdutil.formatteropts,
1003 + cmdutil.formatteropts,
1004 _(b'[--rev REV] [OTHER]'),
1004 _(b'[--rev REV] [OTHER]'),
1005 )
1005 )
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 """runs the changeset discovery protocol in isolation
1007 """runs the changeset discovery protocol in isolation
1008
1008
1009 The local peer can be "replaced" by a subset of the local repository by
1009 The local peer can be "replaced" by a subset of the local repository by
1010 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1010 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1011 be "replaced" by a subset of the local repository using the
1011 be "replaced" by a subset of the local repository using the
1012 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1012 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1013 discovery situations.
1013 discovery situations.
1014
1015 The following developer-oriented config options are relevant for people playing with this command:
1016
1017 * devel.discovery.exchange-heads=True
1018
1019 If False, the discovery will not start with
1020 remote head fetching and local head querying.
1021
1022 * devel.discovery.grow-sample=True
1023
1024 If False, the sample size used in set discovery will not be increased
1025 throughout the process.
1026
1027 * devel.discovery.grow-sample.rate=1.05
1028
1029 The rate at which the sample grows.
1030
1031 * devel.discovery.randomize=True
1032
1033 If andom sampling during discovery are deterministic. It is meant for
1034 integration tests.
1035
1036 * devel.discovery.sample-size=200
1037
1038 Control the initial size of the discovery sample
1039
1040 * devel.discovery.sample-size.initial=100
1041
1042 Control the initial size of the discovery for initial change
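
For example (an illustrative invocation only; the values shown are not the
defaults), discovery against the default remote can be replayed
deterministically with a smaller sample::

  $ hg debugdiscovery --config devel.discovery.randomize=False \
                      --config devel.discovery.sample-size=50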
1014 """
1043 """
1015 opts = pycompat.byteskwargs(opts)
1044 opts = pycompat.byteskwargs(opts)
1016 unfi = repo.unfiltered()
1045 unfi = repo.unfiltered()
1017
1046
1018 # setup potential extra filtering
1047 # setup potential extra filtering
1019 local_revs = opts[b"local_as_revs"]
1048 local_revs = opts[b"local_as_revs"]
1020 remote_revs = opts[b"remote_as_revs"]
1049 remote_revs = opts[b"remote_as_revs"]
1021
1050
1022 # make sure tests are repeatable
1051 # make sure tests are repeatable
1023 random.seed(int(opts[b'seed']))
1052 random.seed(int(opts[b'seed']))
1024
1053
1025 if not remote_revs:
1054 if not remote_revs:
1026
1055
1027 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1056 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1028 remote = hg.peer(repo, opts, remoteurl)
1057 remote = hg.peer(repo, opts, remoteurl)
1029 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1058 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1030 else:
1059 else:
1031 branches = (None, [])
1060 branches = (None, [])
1032 remote_filtered_revs = scmutil.revrange(
1061 remote_filtered_revs = scmutil.revrange(
1033 unfi, [b"not (::(%s))" % remote_revs]
1062 unfi, [b"not (::(%s))" % remote_revs]
1034 )
1063 )
1035 remote_filtered_revs = frozenset(remote_filtered_revs)
1064 remote_filtered_revs = frozenset(remote_filtered_revs)
1036
1065
1037 def remote_func(x):
1066 def remote_func(x):
1038 return remote_filtered_revs
1067 return remote_filtered_revs
1039
1068
1040 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1069 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1041
1070
1042 remote = repo.peer()
1071 remote = repo.peer()
1043 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1072 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1044
1073
1045 if local_revs:
1074 if local_revs:
1046 local_filtered_revs = scmutil.revrange(
1075 local_filtered_revs = scmutil.revrange(
1047 unfi, [b"not (::(%s))" % local_revs]
1076 unfi, [b"not (::(%s))" % local_revs]
1048 )
1077 )
1049 local_filtered_revs = frozenset(local_filtered_revs)
1078 local_filtered_revs = frozenset(local_filtered_revs)
1050
1079
1051 def local_func(x):
1080 def local_func(x):
1052 return local_filtered_revs
1081 return local_filtered_revs
1053
1082
1054 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1083 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1055 repo = repo.filtered(b'debug-discovery-local-filter')
1084 repo = repo.filtered(b'debug-discovery-local-filter')
1056
1085
1057 data = {}
1086 data = {}
1058 if opts.get(b'old'):
1087 if opts.get(b'old'):
1059
1088
1060 def doit(pushedrevs, remoteheads, remote=remote):
1089 def doit(pushedrevs, remoteheads, remote=remote):
1061 if not util.safehasattr(remote, b'branches'):
1090 if not util.safehasattr(remote, b'branches'):
1062 # enable in-client legacy support
1091 # enable in-client legacy support
1063 remote = localrepo.locallegacypeer(remote.local())
1092 remote = localrepo.locallegacypeer(remote.local())
1064 common, _in, hds = treediscovery.findcommonincoming(
1093 common, _in, hds = treediscovery.findcommonincoming(
1065 repo, remote, force=True, audit=data
1094 repo, remote, force=True, audit=data
1066 )
1095 )
1067 common = set(common)
1096 common = set(common)
1068 if not opts.get(b'nonheads'):
1097 if not opts.get(b'nonheads'):
1069 ui.writenoi18n(
1098 ui.writenoi18n(
1070 b"unpruned common: %s\n"
1099 b"unpruned common: %s\n"
1071 % b" ".join(sorted(short(n) for n in common))
1100 % b" ".join(sorted(short(n) for n in common))
1072 )
1101 )
1073
1102
1074 clnode = repo.changelog.node
1103 clnode = repo.changelog.node
1075 common = repo.revs(b'heads(::%ln)', common)
1104 common = repo.revs(b'heads(::%ln)', common)
1076 common = {clnode(r) for r in common}
1105 common = {clnode(r) for r in common}
1077 return common, hds
1106 return common, hds
1078
1107
1079 else:
1108 else:
1080
1109
1081 def doit(pushedrevs, remoteheads, remote=remote):
1110 def doit(pushedrevs, remoteheads, remote=remote):
1082 nodes = None
1111 nodes = None
1083 if pushedrevs:
1112 if pushedrevs:
1084 revs = scmutil.revrange(repo, pushedrevs)
1113 revs = scmutil.revrange(repo, pushedrevs)
1085 nodes = [repo[r].node() for r in revs]
1114 nodes = [repo[r].node() for r in revs]
1086 common, any, hds = setdiscovery.findcommonheads(
1115 common, any, hds = setdiscovery.findcommonheads(
1087 ui, repo, remote, ancestorsof=nodes, audit=data
1116 ui, repo, remote, ancestorsof=nodes, audit=data
1088 )
1117 )
1089 return common, hds
1118 return common, hds
1090
1119
1091 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1120 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1092 localrevs = opts[b'rev']
1121 localrevs = opts[b'rev']
1093
1122
1094 fm = ui.formatter(b'debugdiscovery', opts)
1123 fm = ui.formatter(b'debugdiscovery', opts)
1095 if fm.strict_format:
1124 if fm.strict_format:
1096
1125
1097 @contextlib.contextmanager
1126 @contextlib.contextmanager
1098 def may_capture_output():
1127 def may_capture_output():
1099 ui.pushbuffer()
1128 ui.pushbuffer()
1100 yield
1129 yield
1101 data[b'output'] = ui.popbuffer()
1130 data[b'output'] = ui.popbuffer()
1102
1131
1103 else:
1132 else:
1104 may_capture_output = util.nullcontextmanager
1133 may_capture_output = util.nullcontextmanager
1105 with may_capture_output():
1134 with may_capture_output():
1106 with util.timedcm('debug-discovery') as t:
1135 with util.timedcm('debug-discovery') as t:
1107 common, hds = doit(localrevs, remoterevs)
1136 common, hds = doit(localrevs, remoterevs)
1108
1137
1109 # compute all statistics
1138 # compute all statistics
1110 heads_common = set(common)
1139 heads_common = set(common)
1111 heads_remote = set(hds)
1140 heads_remote = set(hds)
1112 heads_local = set(repo.heads())
1141 heads_local = set(repo.heads())
1113 # note: there cannot be a local or remote head that is in common and not
1142 # note: there cannot be a local or remote head that is in common and not
1114 # itself a head of common.
1143 # itself a head of common.
1115 heads_common_local = heads_common & heads_local
1144 heads_common_local = heads_common & heads_local
1116 heads_common_remote = heads_common & heads_remote
1145 heads_common_remote = heads_common & heads_remote
1117 heads_common_both = heads_common & heads_remote & heads_local
1146 heads_common_both = heads_common & heads_remote & heads_local
1118
1147
1119 all = repo.revs(b'all()')
1148 all = repo.revs(b'all()')
1120 common = repo.revs(b'::%ln', common)
1149 common = repo.revs(b'::%ln', common)
1121 roots_common = repo.revs(b'roots(::%ld)', common)
1150 roots_common = repo.revs(b'roots(::%ld)', common)
1122 missing = repo.revs(b'not ::%ld', common)
1151 missing = repo.revs(b'not ::%ld', common)
1123 heads_missing = repo.revs(b'heads(%ld)', missing)
1152 heads_missing = repo.revs(b'heads(%ld)', missing)
1124 roots_missing = repo.revs(b'roots(%ld)', missing)
1153 roots_missing = repo.revs(b'roots(%ld)', missing)
1125 assert len(common) + len(missing) == len(all)
1154 assert len(common) + len(missing) == len(all)
1126
1155
1127 initial_undecided = repo.revs(
1156 initial_undecided = repo.revs(
1128 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1157 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1129 )
1158 )
1130 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1159 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1131 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1160 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1132 common_initial_undecided = initial_undecided & common
1161 common_initial_undecided = initial_undecided & common
1133 missing_initial_undecided = initial_undecided & missing
1162 missing_initial_undecided = initial_undecided & missing
1134
1163
1135 data[b'elapsed'] = t.elapsed
1164 data[b'elapsed'] = t.elapsed
1136 data[b'nb-common-heads'] = len(heads_common)
1165 data[b'nb-common-heads'] = len(heads_common)
1137 data[b'nb-common-heads-local'] = len(heads_common_local)
1166 data[b'nb-common-heads-local'] = len(heads_common_local)
1138 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1167 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1139 data[b'nb-common-heads-both'] = len(heads_common_both)
1168 data[b'nb-common-heads-both'] = len(heads_common_both)
1140 data[b'nb-common-roots'] = len(roots_common)
1169 data[b'nb-common-roots'] = len(roots_common)
1141 data[b'nb-head-local'] = len(heads_local)
1170 data[b'nb-head-local'] = len(heads_local)
1142 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1171 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1143 data[b'nb-head-remote'] = len(heads_remote)
1172 data[b'nb-head-remote'] = len(heads_remote)
1144 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1173 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1145 heads_common_remote
1174 heads_common_remote
1146 )
1175 )
1147 data[b'nb-revs'] = len(all)
1176 data[b'nb-revs'] = len(all)
1148 data[b'nb-revs-common'] = len(common)
1177 data[b'nb-revs-common'] = len(common)
1149 data[b'nb-revs-missing'] = len(missing)
1178 data[b'nb-revs-missing'] = len(missing)
1150 data[b'nb-missing-heads'] = len(heads_missing)
1179 data[b'nb-missing-heads'] = len(heads_missing)
1151 data[b'nb-missing-roots'] = len(roots_missing)
1180 data[b'nb-missing-roots'] = len(roots_missing)
1152 data[b'nb-ini_und'] = len(initial_undecided)
1181 data[b'nb-ini_und'] = len(initial_undecided)
1153 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1182 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1154 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1183 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1155 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1184 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1156 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1185 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1157
1186
1158 fm.startitem()
1187 fm.startitem()
1159 fm.data(**pycompat.strkwargs(data))
1188 fm.data(**pycompat.strkwargs(data))
1160 # display discovery summary
1189 # display discovery summary
1161 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1190 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1162 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1191 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1163 fm.plain(b"heads summary:\n")
1192 fm.plain(b"heads summary:\n")
1164 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1193 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1165 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1194 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1166 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1195 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1167 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1196 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1168 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1197 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1169 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1198 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1170 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1199 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1171 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1200 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1172 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1201 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1173 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1202 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1174 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1203 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1175 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1204 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1176 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1205 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1177 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1206 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1178 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1207 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1179 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1208 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1180 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1209 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1181 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1210 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1182 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1211 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1183 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1212 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1184 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1213 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1185 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1214 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1186
1215
1187 if ui.verbose:
1216 if ui.verbose:
1188 fm.plain(
1217 fm.plain(
1189 b"common heads: %s\n"
1218 b"common heads: %s\n"
1190 % b" ".join(sorted(short(n) for n in heads_common))
1219 % b" ".join(sorted(short(n) for n in heads_common))
1191 )
1220 )
1192 fm.end()
1221 fm.end()
1193
1222
1194
1223
1195 _chunksize = 4 << 10
1224 _chunksize = 4 << 10
1196
1225
1197
1226
1198 @command(
1227 @command(
1199 b'debugdownload',
1228 b'debugdownload',
1200 [
1229 [
1201 (b'o', b'output', b'', _(b'path')),
1230 (b'o', b'output', b'', _(b'path')),
1202 ],
1231 ],
1203 optionalrepo=True,
1232 optionalrepo=True,
1204 )
1233 )
1205 def debugdownload(ui, repo, url, output=None, **opts):
1234 def debugdownload(ui, repo, url, output=None, **opts):
1206 """download a resource using Mercurial logic and config"""
1235 """download a resource using Mercurial logic and config"""
1207 fh = urlmod.open(ui, url, output)
1236 fh = urlmod.open(ui, url, output)
1208
1237
1209 dest = ui
1238 dest = ui
1210 if output:
1239 if output:
1211 dest = open(output, b"wb", _chunksize)
1240 dest = open(output, b"wb", _chunksize)
1212 try:
1241 try:
1213 data = fh.read(_chunksize)
1242 data = fh.read(_chunksize)
1214 while data:
1243 while data:
1215 dest.write(data)
1244 dest.write(data)
1216 data = fh.read(_chunksize)
1245 data = fh.read(_chunksize)
1217 finally:
1246 finally:
1218 if output:
1247 if output:
1219 dest.close()
1248 dest.close()
1220
1249
1221
1250
1222 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1251 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1223 def debugextensions(ui, repo, **opts):
1252 def debugextensions(ui, repo, **opts):
1224 '''show information about active extensions'''
1253 '''show information about active extensions'''
1225 opts = pycompat.byteskwargs(opts)
1254 opts = pycompat.byteskwargs(opts)
1226 exts = extensions.extensions(ui)
1255 exts = extensions.extensions(ui)
1227 hgver = util.version()
1256 hgver = util.version()
1228 fm = ui.formatter(b'debugextensions', opts)
1257 fm = ui.formatter(b'debugextensions', opts)
1229 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1258 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1230 isinternal = extensions.ismoduleinternal(extmod)
1259 isinternal = extensions.ismoduleinternal(extmod)
1231 extsource = None
1260 extsource = None
1232
1261
1233 if util.safehasattr(extmod, '__file__'):
1262 if util.safehasattr(extmod, '__file__'):
1234 extsource = pycompat.fsencode(extmod.__file__)
1263 extsource = pycompat.fsencode(extmod.__file__)
1235 elif getattr(sys, 'oxidized', False):
1264 elif getattr(sys, 'oxidized', False):
1236 extsource = pycompat.sysexecutable
1265 extsource = pycompat.sysexecutable
1237 if isinternal:
1266 if isinternal:
1238 exttestedwith = [] # never expose magic string to users
1267 exttestedwith = [] # never expose magic string to users
1239 else:
1268 else:
1240 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1269 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1241 extbuglink = getattr(extmod, 'buglink', None)
1270 extbuglink = getattr(extmod, 'buglink', None)
1242
1271
1243 fm.startitem()
1272 fm.startitem()
1244
1273
1245 if ui.quiet or ui.verbose:
1274 if ui.quiet or ui.verbose:
1246 fm.write(b'name', b'%s\n', extname)
1275 fm.write(b'name', b'%s\n', extname)
1247 else:
1276 else:
1248 fm.write(b'name', b'%s', extname)
1277 fm.write(b'name', b'%s', extname)
1249 if isinternal or hgver in exttestedwith:
1278 if isinternal or hgver in exttestedwith:
1250 fm.plain(b'\n')
1279 fm.plain(b'\n')
1251 elif not exttestedwith:
1280 elif not exttestedwith:
1252 fm.plain(_(b' (untested!)\n'))
1281 fm.plain(_(b' (untested!)\n'))
1253 else:
1282 else:
1254 lasttestedversion = exttestedwith[-1]
1283 lasttestedversion = exttestedwith[-1]
1255 fm.plain(b' (%s!)\n' % lasttestedversion)
1284 fm.plain(b' (%s!)\n' % lasttestedversion)
1256
1285
1257 fm.condwrite(
1286 fm.condwrite(
1258 ui.verbose and extsource,
1287 ui.verbose and extsource,
1259 b'source',
1288 b'source',
1260 _(b' location: %s\n'),
1289 _(b' location: %s\n'),
1261 extsource or b"",
1290 extsource or b"",
1262 )
1291 )
1263
1292
1264 if ui.verbose:
1293 if ui.verbose:
1265 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1294 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1266 fm.data(bundled=isinternal)
1295 fm.data(bundled=isinternal)
1267
1296
1268 fm.condwrite(
1297 fm.condwrite(
1269 ui.verbose and exttestedwith,
1298 ui.verbose and exttestedwith,
1270 b'testedwith',
1299 b'testedwith',
1271 _(b' tested with: %s\n'),
1300 _(b' tested with: %s\n'),
1272 fm.formatlist(exttestedwith, name=b'ver'),
1301 fm.formatlist(exttestedwith, name=b'ver'),
1273 )
1302 )
1274
1303
1275 fm.condwrite(
1304 fm.condwrite(
1276 ui.verbose and extbuglink,
1305 ui.verbose and extbuglink,
1277 b'buglink',
1306 b'buglink',
1278 _(b' bug reporting: %s\n'),
1307 _(b' bug reporting: %s\n'),
1279 extbuglink or b"",
1308 extbuglink or b"",
1280 )
1309 )
1281
1310
1282 fm.end()
1311 fm.end()
1283
1312
1284
1313
1285 @command(
1314 @command(
1286 b'debugfileset',
1315 b'debugfileset',
1287 [
1316 [
1288 (
1317 (
1289 b'r',
1318 b'r',
1290 b'rev',
1319 b'rev',
1291 b'',
1320 b'',
1292 _(b'apply the filespec on this revision'),
1321 _(b'apply the filespec on this revision'),
1293 _(b'REV'),
1322 _(b'REV'),
1294 ),
1323 ),
1295 (
1324 (
1296 b'',
1325 b'',
1297 b'all-files',
1326 b'all-files',
1298 False,
1327 False,
1299 _(b'test files from all revisions and working directory'),
1328 _(b'test files from all revisions and working directory'),
1300 ),
1329 ),
1301 (
1330 (
1302 b's',
1331 b's',
1303 b'show-matcher',
1332 b'show-matcher',
1304 None,
1333 None,
1305 _(b'print internal representation of matcher'),
1334 _(b'print internal representation of matcher'),
1306 ),
1335 ),
1307 (
1336 (
1308 b'p',
1337 b'p',
1309 b'show-stage',
1338 b'show-stage',
1310 [],
1339 [],
1311 _(b'print parsed tree at the given stage'),
1340 _(b'print parsed tree at the given stage'),
1312 _(b'NAME'),
1341 _(b'NAME'),
1313 ),
1342 ),
1314 ],
1343 ],
1315 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1344 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1316 )
1345 )
1317 def debugfileset(ui, repo, expr, **opts):
1346 def debugfileset(ui, repo, expr, **opts):
1318 '''parse and apply a fileset specification'''
1347 '''parse and apply a fileset specification'''
1319 from . import fileset
1348 from . import fileset
1320
1349
1321 fileset.symbols # force import of fileset so we have predicates to optimize
1350 fileset.symbols # force import of fileset so we have predicates to optimize
1322 opts = pycompat.byteskwargs(opts)
1351 opts = pycompat.byteskwargs(opts)
1323 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1352 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1324
1353
1325 stages = [
1354 stages = [
1326 (b'parsed', pycompat.identity),
1355 (b'parsed', pycompat.identity),
1327 (b'analyzed', filesetlang.analyze),
1356 (b'analyzed', filesetlang.analyze),
1328 (b'optimized', filesetlang.optimize),
1357 (b'optimized', filesetlang.optimize),
1329 ]
1358 ]
1330 stagenames = {n for n, f in stages}
1359 stagenames = {n for n, f in stages}
1331
1360
1332 showalways = set()
1361 showalways = set()
1333 if ui.verbose and not opts[b'show_stage']:
1362 if ui.verbose and not opts[b'show_stage']:
1334 # show parsed tree by --verbose (deprecated)
1363 # show parsed tree by --verbose (deprecated)
1335 showalways.add(b'parsed')
1364 showalways.add(b'parsed')
1336 if opts[b'show_stage'] == [b'all']:
1365 if opts[b'show_stage'] == [b'all']:
1337 showalways.update(stagenames)
1366 showalways.update(stagenames)
1338 else:
1367 else:
1339 for n in opts[b'show_stage']:
1368 for n in opts[b'show_stage']:
1340 if n not in stagenames:
1369 if n not in stagenames:
1341 raise error.Abort(_(b'invalid stage name: %s') % n)
1370 raise error.Abort(_(b'invalid stage name: %s') % n)
1342 showalways.update(opts[b'show_stage'])
1371 showalways.update(opts[b'show_stage'])
1343
1372
1344 tree = filesetlang.parse(expr)
1373 tree = filesetlang.parse(expr)
1345 for n, f in stages:
1374 for n, f in stages:
1346 tree = f(tree)
1375 tree = f(tree)
1347 if n in showalways:
1376 if n in showalways:
1348 if opts[b'show_stage'] or n != b'parsed':
1377 if opts[b'show_stage'] or n != b'parsed':
1349 ui.write(b"* %s:\n" % n)
1378 ui.write(b"* %s:\n" % n)
1350 ui.write(filesetlang.prettyformat(tree), b"\n")
1379 ui.write(filesetlang.prettyformat(tree), b"\n")
1351
1380
1352 files = set()
1381 files = set()
1353 if opts[b'all_files']:
1382 if opts[b'all_files']:
1354 for r in repo:
1383 for r in repo:
1355 c = repo[r]
1384 c = repo[r]
1356 files.update(c.files())
1385 files.update(c.files())
1357 files.update(c.substate)
1386 files.update(c.substate)
1358 if opts[b'all_files'] or ctx.rev() is None:
1387 if opts[b'all_files'] or ctx.rev() is None:
1359 wctx = repo[None]
1388 wctx = repo[None]
1360 files.update(
1389 files.update(
1361 repo.dirstate.walk(
1390 repo.dirstate.walk(
1362 scmutil.matchall(repo),
1391 scmutil.matchall(repo),
1363 subrepos=list(wctx.substate),
1392 subrepos=list(wctx.substate),
1364 unknown=True,
1393 unknown=True,
1365 ignored=True,
1394 ignored=True,
1366 )
1395 )
1367 )
1396 )
1368 files.update(wctx.substate)
1397 files.update(wctx.substate)
1369 else:
1398 else:
1370 files.update(ctx.files())
1399 files.update(ctx.files())
1371 files.update(ctx.substate)
1400 files.update(ctx.substate)
1372
1401
1373 m = ctx.matchfileset(repo.getcwd(), expr)
1402 m = ctx.matchfileset(repo.getcwd(), expr)
1374 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1403 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1375 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1404 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1376 for f in sorted(files):
1405 for f in sorted(files):
1377 if not m(f):
1406 if not m(f):
1378 continue
1407 continue
1379 ui.write(b"%s\n" % f)
1408 ui.write(b"%s\n" % f)
1380
1409
1381
1410
1382 @command(b'debugformat', [] + cmdutil.formatteropts)
1411 @command(b'debugformat', [] + cmdutil.formatteropts)
1383 def debugformat(ui, repo, **opts):
1412 def debugformat(ui, repo, **opts):
1384 """display format information about the current repository
1413 """display format information about the current repository
1385
1414
1386 Use --verbose to get extra information about the current config value and
1415 Use --verbose to get extra information about the current config value and
1387 the Mercurial default."""
1416 the Mercurial default."""
1388 opts = pycompat.byteskwargs(opts)
1417 opts = pycompat.byteskwargs(opts)
1389 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1418 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1390 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1419 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1391
1420
1392 def makeformatname(name):
1421 def makeformatname(name):
1393 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1422 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1394
1423
1395 fm = ui.formatter(b'debugformat', opts)
1424 fm = ui.formatter(b'debugformat', opts)
1396 if fm.isplain():
1425 if fm.isplain():
1397
1426
1398 def formatvalue(value):
1427 def formatvalue(value):
1399 if util.safehasattr(value, b'startswith'):
1428 if util.safehasattr(value, b'startswith'):
1400 return value
1429 return value
1401 if value:
1430 if value:
1402 return b'yes'
1431 return b'yes'
1403 else:
1432 else:
1404 return b'no'
1433 return b'no'
1405
1434
1406 else:
1435 else:
1407 formatvalue = pycompat.identity
1436 formatvalue = pycompat.identity
1408
1437
1409 fm.plain(b'format-variant')
1438 fm.plain(b'format-variant')
1410 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1439 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1411 fm.plain(b' repo')
1440 fm.plain(b' repo')
1412 if ui.verbose:
1441 if ui.verbose:
1413 fm.plain(b' config default')
1442 fm.plain(b' config default')
1414 fm.plain(b'\n')
1443 fm.plain(b'\n')
1415 for fv in upgrade.allformatvariant:
1444 for fv in upgrade.allformatvariant:
1416 fm.startitem()
1445 fm.startitem()
1417 repovalue = fv.fromrepo(repo)
1446 repovalue = fv.fromrepo(repo)
1418 configvalue = fv.fromconfig(repo)
1447 configvalue = fv.fromconfig(repo)
1419
1448
1420 if repovalue != configvalue:
1449 if repovalue != configvalue:
1421 namelabel = b'formatvariant.name.mismatchconfig'
1450 namelabel = b'formatvariant.name.mismatchconfig'
1422 repolabel = b'formatvariant.repo.mismatchconfig'
1451 repolabel = b'formatvariant.repo.mismatchconfig'
1423 elif repovalue != fv.default:
1452 elif repovalue != fv.default:
1424 namelabel = b'formatvariant.name.mismatchdefault'
1453 namelabel = b'formatvariant.name.mismatchdefault'
1425 repolabel = b'formatvariant.repo.mismatchdefault'
1454 repolabel = b'formatvariant.repo.mismatchdefault'
1426 else:
1455 else:
1427 namelabel = b'formatvariant.name.uptodate'
1456 namelabel = b'formatvariant.name.uptodate'
1428 repolabel = b'formatvariant.repo.uptodate'
1457 repolabel = b'formatvariant.repo.uptodate'
1429
1458
1430 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1459 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1431 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1460 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1432 if fv.default != configvalue:
1461 if fv.default != configvalue:
1433 configlabel = b'formatvariant.config.special'
1462 configlabel = b'formatvariant.config.special'
1434 else:
1463 else:
1435 configlabel = b'formatvariant.config.default'
1464 configlabel = b'formatvariant.config.default'
1436 fm.condwrite(
1465 fm.condwrite(
1437 ui.verbose,
1466 ui.verbose,
1438 b'config',
1467 b'config',
1439 b' %6s',
1468 b' %6s',
1440 formatvalue(configvalue),
1469 formatvalue(configvalue),
1441 label=configlabel,
1470 label=configlabel,
1442 )
1471 )
1443 fm.condwrite(
1472 fm.condwrite(
1444 ui.verbose,
1473 ui.verbose,
1445 b'default',
1474 b'default',
1446 b' %7s',
1475 b' %7s',
1447 formatvalue(fv.default),
1476 formatvalue(fv.default),
1448 label=b'formatvariant.default',
1477 label=b'formatvariant.default',
1449 )
1478 )
1450 fm.plain(b'\n')
1479 fm.plain(b'\n')
1451 fm.end()
1480 fm.end()
1452
1481
1453
1482
1454 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1483 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1455 def debugfsinfo(ui, path=b"."):
1484 def debugfsinfo(ui, path=b"."):
1456 """show information detected about current filesystem"""
1485 """show information detected about current filesystem"""
1457 ui.writenoi18n(b'path: %s\n' % path)
1486 ui.writenoi18n(b'path: %s\n' % path)
1458 ui.writenoi18n(
1487 ui.writenoi18n(
1459 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1488 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1460 )
1489 )
1461 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1490 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1462 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1491 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1463 ui.writenoi18n(
1492 ui.writenoi18n(
1464 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1493 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1465 )
1494 )
1466 ui.writenoi18n(
1495 ui.writenoi18n(
1467 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1496 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1468 )
1497 )
1469 casesensitive = b'(unknown)'
1498 casesensitive = b'(unknown)'
1470 try:
1499 try:
1471 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1500 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1472 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1501 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1473 except OSError:
1502 except OSError:
1474 pass
1503 pass
1475 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1504 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1476
1505
1477
1506
1478 @command(
1507 @command(
1479 b'debuggetbundle',
1508 b'debuggetbundle',
1480 [
1509 [
1481 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1510 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1482 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1511 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1483 (
1512 (
1484 b't',
1513 b't',
1485 b'type',
1514 b'type',
1486 b'bzip2',
1515 b'bzip2',
1487 _(b'bundle compression type to use'),
1516 _(b'bundle compression type to use'),
1488 _(b'TYPE'),
1517 _(b'TYPE'),
1489 ),
1518 ),
1490 ],
1519 ],
1491 _(b'REPO FILE [-H|-C ID]...'),
1520 _(b'REPO FILE [-H|-C ID]...'),
1492 norepo=True,
1521 norepo=True,
1493 )
1522 )
1494 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1523 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1495 """retrieves a bundle from a repo
1524 """retrieves a bundle from a repo
1496
1525
1497 Every ID must be a full-length hex node id string. Saves the bundle to the
1526 Every ID must be a full-length hex node id string. Saves the bundle to the
1498 given file.
1527 given file.
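
For example (the repository path, output file, node id and compression type
below are purely illustrative)::

  $ hg debuggetbundle ../other-repo out.hg \
        -H 0123456789abcdef0123456789abcdef01234567 -t gzip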
1499 """
1528 """
1500 opts = pycompat.byteskwargs(opts)
1529 opts = pycompat.byteskwargs(opts)
1501 repo = hg.peer(ui, opts, repopath)
1530 repo = hg.peer(ui, opts, repopath)
1502 if not repo.capable(b'getbundle'):
1531 if not repo.capable(b'getbundle'):
1503 raise error.Abort(b"getbundle() not supported by target repository")
1532 raise error.Abort(b"getbundle() not supported by target repository")
1504 args = {}
1533 args = {}
1505 if common:
1534 if common:
1506 args['common'] = [bin(s) for s in common]
1535 args['common'] = [bin(s) for s in common]
1507 if head:
1536 if head:
1508 args['heads'] = [bin(s) for s in head]
1537 args['heads'] = [bin(s) for s in head]
1509 # TODO: get desired bundlecaps from command line.
1538 # TODO: get desired bundlecaps from command line.
1510 args['bundlecaps'] = None
1539 args['bundlecaps'] = None
1511 bundle = repo.getbundle(b'debug', **args)
1540 bundle = repo.getbundle(b'debug', **args)
1512
1541
1513 bundletype = opts.get(b'type', b'bzip2').lower()
1542 bundletype = opts.get(b'type', b'bzip2').lower()
1514 btypes = {
1543 btypes = {
1515 b'none': b'HG10UN',
1544 b'none': b'HG10UN',
1516 b'bzip2': b'HG10BZ',
1545 b'bzip2': b'HG10BZ',
1517 b'gzip': b'HG10GZ',
1546 b'gzip': b'HG10GZ',
1518 b'bundle2': b'HG20',
1547 b'bundle2': b'HG20',
1519 }
1548 }
1520 bundletype = btypes.get(bundletype)
1549 bundletype = btypes.get(bundletype)
1521 if bundletype not in bundle2.bundletypes:
1550 if bundletype not in bundle2.bundletypes:
1522 raise error.Abort(_(b'unknown bundle type specified with --type'))
1551 raise error.Abort(_(b'unknown bundle type specified with --type'))
1523 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1552 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1524
1553
1525
1554
1526 @command(b'debugignore', [], b'[FILE]')
1555 @command(b'debugignore', [], b'[FILE]')
1527 def debugignore(ui, repo, *files, **opts):
1556 def debugignore(ui, repo, *files, **opts):
1528 """display the combined ignore pattern and information about ignored files
1557 """display the combined ignore pattern and information about ignored files
1529
1558
1530 With no argument display the combined ignore pattern.
1559 With no argument display the combined ignore pattern.
1531
1560
1532 Given space separated file names, shows if the given file is ignored and
1561 Given space separated file names, shows if the given file is ignored and
1533 if so, shows the ignore rule (file and line number) that matched it.
1562 if so, shows the ignore rule (file and line number) that matched it.
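
For example (the file name and the matching rule shown are purely
illustrative)::

  $ hg debugignore foo.orig
  foo.orig is ignored
  (ignore rule in .hgignore, line 1: '.*\.orig')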
1534 """
1563 """
1535 ignore = repo.dirstate._ignore
1564 ignore = repo.dirstate._ignore
1536 if not files:
1565 if not files:
1537 # Show all the patterns
1566 # Show all the patterns
1538 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1567 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1539 else:
1568 else:
1540 m = scmutil.match(repo[None], pats=files)
1569 m = scmutil.match(repo[None], pats=files)
1541 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1570 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1542 for f in m.files():
1571 for f in m.files():
1543 nf = util.normpath(f)
1572 nf = util.normpath(f)
1544 ignored = None
1573 ignored = None
1545 ignoredata = None
1574 ignoredata = None
1546 if nf != b'.':
1575 if nf != b'.':
1547 if ignore(nf):
1576 if ignore(nf):
1548 ignored = nf
1577 ignored = nf
1549 ignoredata = repo.dirstate._ignorefileandline(nf)
1578 ignoredata = repo.dirstate._ignorefileandline(nf)
1550 else:
1579 else:
1551 for p in pathutil.finddirs(nf):
1580 for p in pathutil.finddirs(nf):
1552 if ignore(p):
1581 if ignore(p):
1553 ignored = p
1582 ignored = p
1554 ignoredata = repo.dirstate._ignorefileandline(p)
1583 ignoredata = repo.dirstate._ignorefileandline(p)
1555 break
1584 break
1556 if ignored:
1585 if ignored:
1557 if ignored == nf:
1586 if ignored == nf:
1558 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1587 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1559 else:
1588 else:
1560 ui.write(
1589 ui.write(
1561 _(
1590 _(
1562 b"%s is ignored because of "
1591 b"%s is ignored because of "
1563 b"containing directory %s\n"
1592 b"containing directory %s\n"
1564 )
1593 )
1565 % (uipathfn(f), ignored)
1594 % (uipathfn(f), ignored)
1566 )
1595 )
1567 ignorefile, lineno, line = ignoredata
1596 ignorefile, lineno, line = ignoredata
1568 ui.write(
1597 ui.write(
1569 _(b"(ignore rule in %s, line %d: '%s')\n")
1598 _(b"(ignore rule in %s, line %d: '%s')\n")
1570 % (ignorefile, lineno, line)
1599 % (ignorefile, lineno, line)
1571 )
1600 )
1572 else:
1601 else:
1573 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1602 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1574
1603
1575
1604
1576 @command(
1605 @command(
1577 b'debugindex',
1606 b'debugindex',
1578 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1607 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1579 _(b'-c|-m|FILE'),
1608 _(b'-c|-m|FILE'),
1580 )
1609 )
1581 def debugindex(ui, repo, file_=None, **opts):
1610 def debugindex(ui, repo, file_=None, **opts):
1582 """dump index data for a storage primitive"""
1611 """dump index data for a storage primitive"""
1583 opts = pycompat.byteskwargs(opts)
1612 opts = pycompat.byteskwargs(opts)
1584 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1613 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1585
1614
1586 if ui.debugflag:
1615 if ui.debugflag:
1587 shortfn = hex
1616 shortfn = hex
1588 else:
1617 else:
1589 shortfn = short
1618 shortfn = short
1590
1619
1591 idlen = 12
1620 idlen = 12
1592 for i in store:
1621 for i in store:
1593 idlen = len(shortfn(store.node(i)))
1622 idlen = len(shortfn(store.node(i)))
1594 break
1623 break
1595
1624
1596 fm = ui.formatter(b'debugindex', opts)
1625 fm = ui.formatter(b'debugindex', opts)
1597 fm.plain(
1626 fm.plain(
1598 b' rev linkrev %s %s p2\n'
1627 b' rev linkrev %s %s p2\n'
1599 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1628 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1600 )
1629 )
1601
1630
1602 for rev in store:
1631 for rev in store:
1603 node = store.node(rev)
1632 node = store.node(rev)
1604 parents = store.parents(node)
1633 parents = store.parents(node)
1605
1634
1606 fm.startitem()
1635 fm.startitem()
1607 fm.write(b'rev', b'%6d ', rev)
1636 fm.write(b'rev', b'%6d ', rev)
1608 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1637 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1609 fm.write(b'node', b'%s ', shortfn(node))
1638 fm.write(b'node', b'%s ', shortfn(node))
1610 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1639 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1611 fm.write(b'p2', b'%s', shortfn(parents[1]))
1640 fm.write(b'p2', b'%s', shortfn(parents[1]))
1612 fm.plain(b'\n')
1641 fm.plain(b'\n')
1613
1642
1614 fm.end()
1643 fm.end()
1615
1644
1616
1645
1617 @command(
1646 @command(
1618 b'debugindexdot',
1647 b'debugindexdot',
1619 cmdutil.debugrevlogopts,
1648 cmdutil.debugrevlogopts,
1620 _(b'-c|-m|FILE'),
1649 _(b'-c|-m|FILE'),
1621 optionalrepo=True,
1650 optionalrepo=True,
1622 )
1651 )
1623 def debugindexdot(ui, repo, file_=None, **opts):
1652 def debugindexdot(ui, repo, file_=None, **opts):
1624 """dump an index DAG as a graphviz dot file"""
1653 """dump an index DAG as a graphviz dot file"""
1625 opts = pycompat.byteskwargs(opts)
1654 opts = pycompat.byteskwargs(opts)
1626 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1655 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1627 ui.writenoi18n(b"digraph G {\n")
1656 ui.writenoi18n(b"digraph G {\n")
1628 for i in r:
1657 for i in r:
1629 node = r.node(i)
1658 node = r.node(i)
1630 pp = r.parents(node)
1659 pp = r.parents(node)
1631 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1660 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1632 if pp[1] != nullid:
1661 if pp[1] != nullid:
1633 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1662 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1634 ui.write(b"}\n")
1663 ui.write(b"}\n")
1635
1664
1636
1665
1637 @command(b'debugindexstats', [])
1666 @command(b'debugindexstats', [])
1638 def debugindexstats(ui, repo):
1667 def debugindexstats(ui, repo):
1639 """show stats related to the changelog index"""
1668 """show stats related to the changelog index"""
1640 repo.changelog.shortest(nullid, 1)
1669 repo.changelog.shortest(nullid, 1)
1641 index = repo.changelog.index
1670 index = repo.changelog.index
1642 if not util.safehasattr(index, b'stats'):
1671 if not util.safehasattr(index, b'stats'):
1643 raise error.Abort(_(b'debugindexstats only works with native code'))
1672 raise error.Abort(_(b'debugindexstats only works with native code'))
1644 for k, v in sorted(index.stats().items()):
1673 for k, v in sorted(index.stats().items()):
1645 ui.write(b'%s: %d\n' % (k, v))
1674 ui.write(b'%s: %d\n' % (k, v))
1646
1675
1647
1676
1648 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1677 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1649 def debuginstall(ui, **opts):
1678 def debuginstall(ui, **opts):
1650 """test Mercurial installation
1679 """test Mercurial installation
1651
1680
1652 Returns 0 on success.
1681 Returns 0 on success.
1653 """
1682 """
1654 opts = pycompat.byteskwargs(opts)
1683 opts = pycompat.byteskwargs(opts)
1655
1684
1656 problems = 0
1685 problems = 0
1657
1686
1658 fm = ui.formatter(b'debuginstall', opts)
1687 fm = ui.formatter(b'debuginstall', opts)
1659 fm.startitem()
1688 fm.startitem()
1660
1689
1661 # encoding might be unknown or wrong. don't translate these messages.
1690 # encoding might be unknown or wrong. don't translate these messages.
1662 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1691 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1663 err = None
1692 err = None
1664 try:
1693 try:
1665 codecs.lookup(pycompat.sysstr(encoding.encoding))
1694 codecs.lookup(pycompat.sysstr(encoding.encoding))
1666 except LookupError as inst:
1695 except LookupError as inst:
1667 err = stringutil.forcebytestr(inst)
1696 err = stringutil.forcebytestr(inst)
1668 problems += 1
1697 problems += 1
1669 fm.condwrite(
1698 fm.condwrite(
1670 err,
1699 err,
1671 b'encodingerror',
1700 b'encodingerror',
1672 b" %s\n (check that your locale is properly set)\n",
1701 b" %s\n (check that your locale is properly set)\n",
1673 err,
1702 err,
1674 )
1703 )
1675
1704
1676 # Python
1705 # Python
1677 pythonlib = None
1706 pythonlib = None
1678 if util.safehasattr(os, '__file__'):
1707 if util.safehasattr(os, '__file__'):
1679 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1708 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1680 elif getattr(sys, 'oxidized', False):
1709 elif getattr(sys, 'oxidized', False):
1681 pythonlib = pycompat.sysexecutable
1710 pythonlib = pycompat.sysexecutable
1682
1711
1683 fm.write(
1712 fm.write(
1684 b'pythonexe',
1713 b'pythonexe',
1685 _(b"checking Python executable (%s)\n"),
1714 _(b"checking Python executable (%s)\n"),
1686 pycompat.sysexecutable or _(b"unknown"),
1715 pycompat.sysexecutable or _(b"unknown"),
1687 )
1716 )
1688 fm.write(
1717 fm.write(
1689 b'pythonimplementation',
1718 b'pythonimplementation',
1690 _(b"checking Python implementation (%s)\n"),
1719 _(b"checking Python implementation (%s)\n"),
1691 pycompat.sysbytes(platform.python_implementation()),
1720 pycompat.sysbytes(platform.python_implementation()),
1692 )
1721 )
1693 fm.write(
1722 fm.write(
1694 b'pythonver',
1723 b'pythonver',
1695 _(b"checking Python version (%s)\n"),
1724 _(b"checking Python version (%s)\n"),
1696 (b"%d.%d.%d" % sys.version_info[:3]),
1725 (b"%d.%d.%d" % sys.version_info[:3]),
1697 )
1726 )
1698 fm.write(
1727 fm.write(
1699 b'pythonlib',
1728 b'pythonlib',
1700 _(b"checking Python lib (%s)...\n"),
1729 _(b"checking Python lib (%s)...\n"),
1701 pythonlib or _(b"unknown"),
1730 pythonlib or _(b"unknown"),
1702 )
1731 )
1703
1732
1704 try:
1733 try:
1705 from . import rustext # pytype: disable=import-error
1734 from . import rustext # pytype: disable=import-error
1706
1735
1707 rustext.__doc__ # trigger lazy import
1736 rustext.__doc__ # trigger lazy import
1708 except ImportError:
1737 except ImportError:
1709 rustext = None
1738 rustext = None
1710
1739
1711 security = set(sslutil.supportedprotocols)
1740 security = set(sslutil.supportedprotocols)
1712 if sslutil.hassni:
1741 if sslutil.hassni:
1713 security.add(b'sni')
1742 security.add(b'sni')
1714
1743
1715 fm.write(
1744 fm.write(
1716 b'pythonsecurity',
1745 b'pythonsecurity',
1717 _(b"checking Python security support (%s)\n"),
1746 _(b"checking Python security support (%s)\n"),
1718 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1747 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1719 )
1748 )
1720
1749
1721 # These are warnings, not errors. So don't increment problem count. This
1750 # These are warnings, not errors. So don't increment problem count. This
1722 # may change in the future.
1751 # may change in the future.
1723 if b'tls1.2' not in security:
1752 if b'tls1.2' not in security:
1724 fm.plain(
1753 fm.plain(
1725 _(
1754 _(
1726 b' TLS 1.2 not supported by Python install; '
1755 b' TLS 1.2 not supported by Python install; '
1727 b'network connections lack modern security\n'
1756 b'network connections lack modern security\n'
1728 )
1757 )
1729 )
1758 )
1730 if b'sni' not in security:
1759 if b'sni' not in security:
1731 fm.plain(
1760 fm.plain(
1732 _(
1761 _(
1733 b' SNI not supported by Python install; may have '
1762 b' SNI not supported by Python install; may have '
1734 b'connectivity issues with some servers\n'
1763 b'connectivity issues with some servers\n'
1735 )
1764 )
1736 )
1765 )
1737
1766
1738 fm.plain(
1767 fm.plain(
1739 _(
1768 _(
1740 b"checking Rust extensions (%s)\n"
1769 b"checking Rust extensions (%s)\n"
1741 % (b'missing' if rustext is None else b'installed')
1770 % (b'missing' if rustext is None else b'installed')
1742 ),
1771 ),
1743 )
1772 )
1744
1773
1745 # TODO print CA cert info
1774 # TODO print CA cert info
1746
1775
1747 # hg version
1776 # hg version
1748 hgver = util.version()
1777 hgver = util.version()
1749 fm.write(
1778 fm.write(
1750 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1779 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1751 )
1780 )
1752 fm.write(
1781 fm.write(
1753 b'hgverextra',
1782 b'hgverextra',
1754 _(b"checking Mercurial custom build (%s)\n"),
1783 _(b"checking Mercurial custom build (%s)\n"),
1755 b'+'.join(hgver.split(b'+')[1:]),
1784 b'+'.join(hgver.split(b'+')[1:]),
1756 )
1785 )
1757
1786
1758 # compiled modules
1787 # compiled modules
1759 hgmodules = None
1788 hgmodules = None
1760 if util.safehasattr(sys.modules[__name__], '__file__'):
1789 if util.safehasattr(sys.modules[__name__], '__file__'):
1761 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1790 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1762 elif getattr(sys, 'oxidized', False):
1791 elif getattr(sys, 'oxidized', False):
1763 hgmodules = pycompat.sysexecutable
1792 hgmodules = pycompat.sysexecutable
1764
1793
1765 fm.write(
1794 fm.write(
1766 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1795 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1767 )
1796 )
1768 fm.write(
1797 fm.write(
1769 b'hgmodules',
1798 b'hgmodules',
1770 _(b"checking installed modules (%s)...\n"),
1799 _(b"checking installed modules (%s)...\n"),
1771 hgmodules or _(b"unknown"),
1800 hgmodules or _(b"unknown"),
1772 )
1801 )
1773
1802
1774 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1803 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1775 rustext = rustandc # for now, that's the only case
1804 rustext = rustandc # for now, that's the only case
1776 cext = policy.policy in (b'c', b'allow') or rustandc
1805 cext = policy.policy in (b'c', b'allow') or rustandc
1777 nopure = cext or rustext
1806 nopure = cext or rustext
1778 if nopure:
1807 if nopure:
1779 err = None
1808 err = None
1780 try:
1809 try:
1781 if cext:
1810 if cext:
1782 from .cext import ( # pytype: disable=import-error
1811 from .cext import ( # pytype: disable=import-error
1783 base85,
1812 base85,
1784 bdiff,
1813 bdiff,
1785 mpatch,
1814 mpatch,
1786 osutil,
1815 osutil,
1787 )
1816 )
1788
1817
1789 # quiet pyflakes
1818 # quiet pyflakes
1790 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1819 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1791 if rustext:
1820 if rustext:
1792 from .rustext import ( # pytype: disable=import-error
1821 from .rustext import ( # pytype: disable=import-error
1793 ancestor,
1822 ancestor,
1794 dirstate,
1823 dirstate,
1795 )
1824 )
1796
1825
1797 dir(ancestor), dir(dirstate) # quiet pyflakes
1826 dir(ancestor), dir(dirstate) # quiet pyflakes
1798 except Exception as inst:
1827 except Exception as inst:
1799 err = stringutil.forcebytestr(inst)
1828 err = stringutil.forcebytestr(inst)
1800 problems += 1
1829 problems += 1
1801 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1830 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1802
1831
1803 compengines = util.compengines._engines.values()
1832 compengines = util.compengines._engines.values()
1804 fm.write(
1833 fm.write(
1805 b'compengines',
1834 b'compengines',
1806 _(b'checking registered compression engines (%s)\n'),
1835 _(b'checking registered compression engines (%s)\n'),
1807 fm.formatlist(
1836 fm.formatlist(
1808 sorted(e.name() for e in compengines),
1837 sorted(e.name() for e in compengines),
1809 name=b'compengine',
1838 name=b'compengine',
1810 fmt=b'%s',
1839 fmt=b'%s',
1811 sep=b', ',
1840 sep=b', ',
1812 ),
1841 ),
1813 )
1842 )
1814 fm.write(
1843 fm.write(
1815 b'compenginesavail',
1844 b'compenginesavail',
1816 _(b'checking available compression engines (%s)\n'),
1845 _(b'checking available compression engines (%s)\n'),
1817 fm.formatlist(
1846 fm.formatlist(
1818 sorted(e.name() for e in compengines if e.available()),
1847 sorted(e.name() for e in compengines if e.available()),
1819 name=b'compengine',
1848 name=b'compengine',
1820 fmt=b'%s',
1849 fmt=b'%s',
1821 sep=b', ',
1850 sep=b', ',
1822 ),
1851 ),
1823 )
1852 )
1824 wirecompengines = compression.compengines.supportedwireengines(
1853 wirecompengines = compression.compengines.supportedwireengines(
1825 compression.SERVERROLE
1854 compression.SERVERROLE
1826 )
1855 )
1827 fm.write(
1856 fm.write(
1828 b'compenginesserver',
1857 b'compenginesserver',
1829 _(
1858 _(
1830 b'checking available compression engines '
1859 b'checking available compression engines '
1831 b'for wire protocol (%s)\n'
1860 b'for wire protocol (%s)\n'
1832 ),
1861 ),
1833 fm.formatlist(
1862 fm.formatlist(
1834 [e.name() for e in wirecompengines if e.wireprotosupport()],
1863 [e.name() for e in wirecompengines if e.wireprotosupport()],
1835 name=b'compengine',
1864 name=b'compengine',
1836 fmt=b'%s',
1865 fmt=b'%s',
1837 sep=b', ',
1866 sep=b', ',
1838 ),
1867 ),
1839 )
1868 )
1840 re2 = b'missing'
1869 re2 = b'missing'
1841 if util._re2:
1870 if util._re2:
1842 re2 = b'available'
1871 re2 = b'available'
1843 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1872 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1844 fm.data(re2=bool(util._re2))
1873 fm.data(re2=bool(util._re2))
1845
1874
1846 # templates
1875 # templates
1847 p = templater.templatedir()
1876 p = templater.templatedir()
1848 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1877 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1849 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1878 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1850 if p:
1879 if p:
1851 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1880 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1852 if m:
1881 if m:
1853 # template found, check if it is working
1882 # template found, check if it is working
1854 err = None
1883 err = None
1855 try:
1884 try:
1856 templater.templater.frommapfile(m)
1885 templater.templater.frommapfile(m)
1857 except Exception as inst:
1886 except Exception as inst:
1858 err = stringutil.forcebytestr(inst)
1887 err = stringutil.forcebytestr(inst)
1859 p = None
1888 p = None
1860 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1889 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1861 else:
1890 else:
1862 p = None
1891 p = None
1863 fm.condwrite(
1892 fm.condwrite(
1864 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1893 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1865 )
1894 )
1866 fm.condwrite(
1895 fm.condwrite(
1867 not m,
1896 not m,
1868 b'defaulttemplatenotfound',
1897 b'defaulttemplatenotfound',
1869 _(b" template '%s' not found\n"),
1898 _(b" template '%s' not found\n"),
1870 b"default",
1899 b"default",
1871 )
1900 )
1872 if not p:
1901 if not p:
1873 problems += 1
1902 problems += 1
1874 fm.condwrite(
1903 fm.condwrite(
1875 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1904 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1876 )
1905 )
1877
1906
1878 # editor
1907 # editor
1879 editor = ui.geteditor()
1908 editor = ui.geteditor()
1880 editor = util.expandpath(editor)
1909 editor = util.expandpath(editor)
1881 editorbin = procutil.shellsplit(editor)[0]
1910 editorbin = procutil.shellsplit(editor)[0]
1882 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1911 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1883 cmdpath = procutil.findexe(editorbin)
1912 cmdpath = procutil.findexe(editorbin)
1884 fm.condwrite(
1913 fm.condwrite(
1885 not cmdpath and editor == b'vi',
1914 not cmdpath and editor == b'vi',
1886 b'vinotfound',
1915 b'vinotfound',
1887 _(
1916 _(
1888 b" No commit editor set and can't find %s in PATH\n"
1917 b" No commit editor set and can't find %s in PATH\n"
1889 b" (specify a commit editor in your configuration"
1918 b" (specify a commit editor in your configuration"
1890 b" file)\n"
1919 b" file)\n"
1891 ),
1920 ),
1892 not cmdpath and editor == b'vi' and editorbin,
1921 not cmdpath and editor == b'vi' and editorbin,
1893 )
1922 )
1894 fm.condwrite(
1923 fm.condwrite(
1895 not cmdpath and editor != b'vi',
1924 not cmdpath and editor != b'vi',
1896 b'editornotfound',
1925 b'editornotfound',
1897 _(
1926 _(
1898 b" Can't find editor '%s' in PATH\n"
1927 b" Can't find editor '%s' in PATH\n"
1899 b" (specify a commit editor in your configuration"
1928 b" (specify a commit editor in your configuration"
1900 b" file)\n"
1929 b" file)\n"
1901 ),
1930 ),
1902 not cmdpath and editorbin,
1931 not cmdpath and editorbin,
1903 )
1932 )
1904 if not cmdpath and editor != b'vi':
1933 if not cmdpath and editor != b'vi':
1905 problems += 1
1934 problems += 1
1906
1935
1907 # check username
1936 # check username
1908 username = None
1937 username = None
1909 err = None
1938 err = None
1910 try:
1939 try:
1911 username = ui.username()
1940 username = ui.username()
1912 except error.Abort as e:
1941 except error.Abort as e:
1913 err = e.message
1942 err = e.message
1914 problems += 1
1943 problems += 1
1915
1944
1916 fm.condwrite(
1945 fm.condwrite(
1917 username, b'username', _(b"checking username (%s)\n"), username
1946 username, b'username', _(b"checking username (%s)\n"), username
1918 )
1947 )
1919 fm.condwrite(
1948 fm.condwrite(
1920 err,
1949 err,
1921 b'usernameerror',
1950 b'usernameerror',
1922 _(
1951 _(
1923 b"checking username...\n %s\n"
1952 b"checking username...\n %s\n"
1924 b" (specify a username in your configuration file)\n"
1953 b" (specify a username in your configuration file)\n"
1925 ),
1954 ),
1926 err,
1955 err,
1927 )
1956 )
1928
1957
1929 for name, mod in extensions.extensions():
1958 for name, mod in extensions.extensions():
1930 handler = getattr(mod, 'debuginstall', None)
1959 handler = getattr(mod, 'debuginstall', None)
1931 if handler is not None:
1960 if handler is not None:
1932 problems += handler(ui, fm)
1961 problems += handler(ui, fm)
1933
1962
1934 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1963 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1935 if not problems:
1964 if not problems:
1936 fm.data(problems=problems)
1965 fm.data(problems=problems)
1937 fm.condwrite(
1966 fm.condwrite(
1938 problems,
1967 problems,
1939 b'problems',
1968 b'problems',
1940 _(b"%d problems detected, please check your install!\n"),
1969 _(b"%d problems detected, please check your install!\n"),
1941 problems,
1970 problems,
1942 )
1971 )
1943 fm.end()
1972 fm.end()
1944
1973
1945 return problems
1974 return problems


@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
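
# Illustrative use of `hg debugknown` (a sketch; the repository URL and the
# full-length hex node ids below are hypothetical, not from a real repo):
#
#   $ hg debugknown https://example.com/repo \
#   >     1111111111111111111111111111111111111111 \
#   >     2222222222222222222222222222222222222222
#   10
#
# Each queried id is echoed back as "1" (known) or "0" (unknown), in the same
# order as the arguments, so "10" means the first id is known and the second
# is not.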


@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)


@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
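
# Sketch of typical `hg debuglocks` output (the user, pid and age shown are
# made up); the "%-6s" format in report() pads the lock-name column:
#
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 12345 (3s)
#
# Holding the store lock until interrupted:
#
#   $ hg debuglocks --set-lock
#   ready to release the lock (y)?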


@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
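
# Sketch of the listing produced above when the cache is populated (the node
# id and sizes are invented for illustration):
#
#   $ hg debugmanifestfulltextcache
#   cache contains 1 manifest entries, in order of most to least recent:
#   id: 1111111111111111111111111111111111111111, size 205 bytes
#   total cache data size 229 bytes, on-disk 229 bytes
#
# The 24-byte difference between the entry size and the accounted size matches
# the per-entry overhead added above (20 bytes node id + 4 bytes length).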


@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()


@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in pycompat.iteritems(repo.names):
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')


@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
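
# Sketch of `hg debugnodemap --metadata` output (all values are invented for
# illustration); the fields mirror the docket attributes written above:
#
#   uid: 87c244ef
#   tip-rev: 5004
#   tip-node: 2222222222222222222222222222222222222222
#   data-length: 121088
#   data-unused: 256
#   data-unused: 0.211%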


@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
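
# Minimal sketch of creating and then listing a marker with `hg debugobsolete`
# (both full-length node ids below are hypothetical):
#
#   $ hg debugobsolete 1111111111111111111111111111111111111111 \
#   >     2222222222222222222222222222222222222222
#   $ hg debugobsolete
#   1111... 2222... 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'alice'}
#
# The exact listing format comes from cmdutil.showmarker() and may differ.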


@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))


@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))


@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')


@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
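
# Example invocation (a sketch; the revisions and file names are hypothetical):
#
#   $ hg debugpathcopies 1 2
#   old-name.txt -> new-name.txt
#
# Output is one "source -> destination" line per copy, matching the write()
# call above.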


@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        try:
            local = peer.local() is not None
            canpush = peer.canpush()

            ui.write(_(b'url: %s\n') % peer.url())
            ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
            ui.write(
                _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
            )
        finally:
            peer.close()
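
# Sketch of `hg debugpeer` output for a local and a remote peer (the paths and
# URL are hypothetical, and "pushable" depends on the peer's configuration):
#
#   $ hg debugpeer /path/to/local-repo
#   url: /path/to/local-repo
#   local: yes
#   pushable: yes
#
#   $ hg debugpeer https://example.com/repo
#   url: https://example.com/repo
#   local: no
#   pushable: yes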


@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases the amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows the configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If the merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
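
# Sketch of `hg debugpickmergetool` output (the file name is hypothetical);
# each line follows the "FILE = MERGETOOL" style documented above, and the
# "with --tool" note is only shown because of -v:
#
#   $ hg debugpickmergetool -v --tool :merge3
#   with --tool ':merge3'
#   a.txt = :merge3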


@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
        else:
            for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
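
# Illustrative listing of a pushkey namespace (the repository path, bookmark
# name and node id are hypothetical); keys and values are printed
# tab-separated by the write() call above:
#
#   $ hg debugpushkey /path/to/repo bookmarks
#   my-bookmark	1111111111111111111111111111111111111111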


@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )


@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)


@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)


@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)


@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
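
# Typical `hg debugrequirements` output is one requirement name per line, for
# example (the exact set depends on how the repository was created):
#
#   dotencode
#   fncache
#   generaldelta
#   revlogv1
#   store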
2877
2906
2878
2907
2879 @command(
2908 @command(
2880 b'debugrevlog',
2909 b'debugrevlog',
2881 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2910 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2882 _(b'-c|-m|FILE'),
2911 _(b'-c|-m|FILE'),
2883 optionalrepo=True,
2912 optionalrepo=True,
2884 )
2913 )
2885 def debugrevlog(ui, repo, file_=None, **opts):
2914 def debugrevlog(ui, repo, file_=None, **opts):
2886 """show data and statistics about a revlog"""
2915 """show data and statistics about a revlog"""
2887 opts = pycompat.byteskwargs(opts)
2916 opts = pycompat.byteskwargs(opts)
2888 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2917 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2889
2918
2890 if opts.get(b"dump"):
2919 if opts.get(b"dump"):
2891 numrevs = len(r)
2920 numrevs = len(r)
2892 ui.write(
2921 ui.write(
2893 (
2922 (
2894 b"# rev p1rev p2rev start end deltastart base p1 p2"
2923 b"# rev p1rev p2rev start end deltastart base p1 p2"
2895 b" rawsize totalsize compression heads chainlen\n"
2924 b" rawsize totalsize compression heads chainlen\n"
2896 )
2925 )
2897 )
2926 )
2898 ts = 0
2927 ts = 0
2899 heads = set()
2928 heads = set()
2900
2929
2901 for rev in pycompat.xrange(numrevs):
2930 for rev in pycompat.xrange(numrevs):
2902 dbase = r.deltaparent(rev)
2931 dbase = r.deltaparent(rev)
2903 if dbase == -1:
2932 if dbase == -1:
2904 dbase = rev
2933 dbase = rev
2905 cbase = r.chainbase(rev)
2934 cbase = r.chainbase(rev)
2906 clen = r.chainlen(rev)
2935 clen = r.chainlen(rev)
2907 p1, p2 = r.parentrevs(rev)
2936 p1, p2 = r.parentrevs(rev)
2908 rs = r.rawsize(rev)
2937 rs = r.rawsize(rev)
2909 ts = ts + rs
2938 ts = ts + rs
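# track the heads seen so far: adding a revision removes its parents from
# the head set, so `len(heads)` written below is the current head count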
2910 heads -= set(r.parentrevs(rev))
2939 heads -= set(r.parentrevs(rev))
2911 heads.add(rev)
2940 heads.add(rev)
2912 try:
2941 try:
2913 compression = ts / r.end(rev)
2942 compression = ts / r.end(rev)
2914 except ZeroDivisionError:
2943 except ZeroDivisionError:
2915 compression = 0
2944 compression = 0
2916 ui.write(
2945 ui.write(
2917 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2946 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2918 b"%11d %5d %8d\n"
2947 b"%11d %5d %8d\n"
2919 % (
2948 % (
2920 rev,
2949 rev,
2921 p1,
2950 p1,
2922 p2,
2951 p2,
2923 r.start(rev),
2952 r.start(rev),
2924 r.end(rev),
2953 r.end(rev),
2925 r.start(dbase),
2954 r.start(dbase),
2926 r.start(cbase),
2955 r.start(cbase),
2927 r.start(p1),
2956 r.start(p1),
2928 r.start(p2),
2957 r.start(p2),
2929 rs,
2958 rs,
2930 ts,
2959 ts,
2931 compression,
2960 compression,
2932 len(heads),
2961 len(heads),
2933 clen,
2962 clen,
2934 )
2963 )
2935 )
2964 )
2936 return 0
2965 return 0
2937
2966
2938 v = r.version
2967 v = r.version
2939 format = v & 0xFFFF
2968 format = v & 0xFFFF
2940 flags = []
2969 flags = []
2941 gdelta = False
2970 gdelta = False
2942 if v & revlog.FLAG_INLINE_DATA:
2971 if v & revlog.FLAG_INLINE_DATA:
2943 flags.append(b'inline')
2972 flags.append(b'inline')
2944 if v & revlog.FLAG_GENERALDELTA:
2973 if v & revlog.FLAG_GENERALDELTA:
2945 gdelta = True
2974 gdelta = True
2946 flags.append(b'generaldelta')
2975 flags.append(b'generaldelta')
2947 if not flags:
2976 if not flags:
2948 flags = [b'(none)']
2977 flags = [b'(none)']
2949
2978
2950 ### tracks merge vs single parent
2979 ### tracks merge vs single parent
2951 nummerges = 0
2980 nummerges = 0
2952
2981
2953 ### tracks the ways the deltas are built
2982 ### tracks the ways the deltas are built
2954 # nodelta
2983 # nodelta
2955 numempty = 0
2984 numempty = 0
2956 numemptytext = 0
2985 numemptytext = 0
2957 numemptydelta = 0
2986 numemptydelta = 0
2958 # full file content
2987 # full file content
2959 numfull = 0
2988 numfull = 0
2960 # intermediate snapshot against a prior snapshot
2989 # intermediate snapshot against a prior snapshot
2961 numsemi = 0
2990 numsemi = 0
2962 # snapshot count per depth
2991 # snapshot count per depth
2963 numsnapdepth = collections.defaultdict(lambda: 0)
2992 numsnapdepth = collections.defaultdict(lambda: 0)
2964 # delta against previous revision
2993 # delta against previous revision
2965 numprev = 0
2994 numprev = 0
2966 # delta against first or second parent (not prev)
2995 # delta against first or second parent (not prev)
2967 nump1 = 0
2996 nump1 = 0
2968 nump2 = 0
2997 nump2 = 0
2969 # delta against neither prev nor parents
2998 # delta against neither prev nor parents
2970 numother = 0
2999 numother = 0
2971 # delta against prev that are also first or second parent
3000 # delta against prev that are also first or second parent
2972 # (details of `numprev`)
3001 # (details of `numprev`)
2973 nump1prev = 0
3002 nump1prev = 0
2974 nump2prev = 0
3003 nump2prev = 0
2975
3004
2976 # data about the delta chain of each rev
3005 # data about the delta chain of each rev
2977 chainlengths = []
3006 chainlengths = []
2978 chainbases = []
3007 chainbases = []
2979 chainspans = []
3008 chainspans = []
2980
3009
2981 # data about each revision
3010 # data about each revision
2982 datasize = [None, 0, 0]
3011 datasize = [None, 0, 0]
2983 fullsize = [None, 0, 0]
3012 fullsize = [None, 0, 0]
2984 semisize = [None, 0, 0]
3013 semisize = [None, 0, 0]
2985 # snapshot count per depth
3014 # snapshot count per depth
2986 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3015 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2987 deltasize = [None, 0, 0]
3016 deltasize = [None, 0, 0]
2988 chunktypecounts = {}
3017 chunktypecounts = {}
2989 chunktypesizes = {}
3018 chunktypesizes = {}
2990
3019
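# each size accumulator below is a 3-element list of [min, max, total];
# addsize() folds one observed size into such a list in place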
2991 def addsize(size, l):
3020 def addsize(size, l):
2992 if l[0] is None or size < l[0]:
3021 if l[0] is None or size < l[0]:
2993 l[0] = size
3022 l[0] = size
2994 if size > l[1]:
3023 if size > l[1]:
2995 l[1] = size
3024 l[1] = size
2996 l[2] += size
3025 l[2] += size
2997
3026
2998 numrevs = len(r)
3027 numrevs = len(r)
2999 for rev in pycompat.xrange(numrevs):
3028 for rev in pycompat.xrange(numrevs):
3000 p1, p2 = r.parentrevs(rev)
3029 p1, p2 = r.parentrevs(rev)
3001 delta = r.deltaparent(rev)
3030 delta = r.deltaparent(rev)
3002 if format > 0:
3031 if format > 0:
3003 addsize(r.rawsize(rev), datasize)
3032 addsize(r.rawsize(rev), datasize)
3004 if p2 != nullrev:
3033 if p2 != nullrev:
3005 nummerges += 1
3034 nummerges += 1
3006 size = r.length(rev)
3035 size = r.length(rev)
3007 if delta == nullrev:
3036 if delta == nullrev:
3008 chainlengths.append(0)
3037 chainlengths.append(0)
3009 chainbases.append(r.start(rev))
3038 chainbases.append(r.start(rev))
3010 chainspans.append(size)
3039 chainspans.append(size)
3011 if size == 0:
3040 if size == 0:
3012 numempty += 1
3041 numempty += 1
3013 numemptytext += 1
3042 numemptytext += 1
3014 else:
3043 else:
3015 numfull += 1
3044 numfull += 1
3016 numsnapdepth[0] += 1
3045 numsnapdepth[0] += 1
3017 addsize(size, fullsize)
3046 addsize(size, fullsize)
3018 addsize(size, snapsizedepth[0])
3047 addsize(size, snapsizedepth[0])
3019 else:
3048 else:
3020 chainlengths.append(chainlengths[delta] + 1)
3049 chainlengths.append(chainlengths[delta] + 1)
3021 baseaddr = chainbases[delta]
3050 baseaddr = chainbases[delta]
3022 revaddr = r.start(rev)
3051 revaddr = r.start(rev)
3023 chainbases.append(baseaddr)
3052 chainbases.append(baseaddr)
3024 chainspans.append((revaddr - baseaddr) + size)
3053 chainspans.append((revaddr - baseaddr) + size)
3025 if size == 0:
3054 if size == 0:
3026 numempty += 1
3055 numempty += 1
3027 numemptydelta += 1
3056 numemptydelta += 1
3028 elif r.issnapshot(rev):
3057 elif r.issnapshot(rev):
3029 addsize(size, semisize)
3058 addsize(size, semisize)
3030 numsemi += 1
3059 numsemi += 1
3031 depth = r.snapshotdepth(rev)
3060 depth = r.snapshotdepth(rev)
3032 numsnapdepth[depth] += 1
3061 numsnapdepth[depth] += 1
3033 addsize(size, snapsizedepth[depth])
3062 addsize(size, snapsizedepth[depth])
3034 else:
3063 else:
3035 addsize(size, deltasize)
3064 addsize(size, deltasize)
3036 if delta == rev - 1:
3065 if delta == rev - 1:
3037 numprev += 1
3066 numprev += 1
3038 if delta == p1:
3067 if delta == p1:
3039 nump1prev += 1
3068 nump1prev += 1
3040 elif delta == p2:
3069 elif delta == p2:
3041 nump2prev += 1
3070 nump2prev += 1
3042 elif delta == p1:
3071 elif delta == p1:
3043 nump1 += 1
3072 nump1 += 1
3044 elif delta == p2:
3073 elif delta == p2:
3045 nump2 += 1
3074 nump2 += 1
3046 elif delta != nullrev:
3075 elif delta != nullrev:
3047 numother += 1
3076 numother += 1
3048
3077
3049 # Obtain data on the raw chunks in the revlog.
3078 # Obtain data on the raw chunks in the revlog.
3050 if util.safehasattr(r, b'_getsegmentforrevs'):
3079 if util.safehasattr(r, b'_getsegmentforrevs'):
3051 segment = r._getsegmentforrevs(rev, rev)[1]
3080 segment = r._getsegmentforrevs(rev, rev)[1]
3052 else:
3081 else:
3053 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3082 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3054 if segment:
3083 if segment:
3055 chunktype = bytes(segment[0:1])
3084 chunktype = bytes(segment[0:1])
3056 else:
3085 else:
3057 chunktype = b'empty'
3086 chunktype = b'empty'
3058
3087
3059 if chunktype not in chunktypecounts:
3088 if chunktype not in chunktypecounts:
3060 chunktypecounts[chunktype] = 0
3089 chunktypecounts[chunktype] = 0
3061 chunktypesizes[chunktype] = 0
3090 chunktypesizes[chunktype] = 0
3062
3091
3063 chunktypecounts[chunktype] += 1
3092 chunktypecounts[chunktype] += 1
3064 chunktypesizes[chunktype] += size
3093 chunktypesizes[chunktype] += size
3065
3094
3066 # Adjust size min value for empty cases
3095 # Adjust size min value for empty cases
3067 for size in (datasize, fullsize, semisize, deltasize):
3096 for size in (datasize, fullsize, semisize, deltasize):
3068 if size[0] is None:
3097 if size[0] is None:
3069 size[0] = 0
3098 size[0] = 0
3070
3099
3071 numdeltas = numrevs - numfull - numempty - numsemi
3100 numdeltas = numrevs - numfull - numempty - numsemi
3072 numoprev = numprev - nump1prev - nump2prev
3101 numoprev = numprev - nump1prev - nump2prev
3073 totalrawsize = datasize[2]
3102 totalrawsize = datasize[2]
3074 datasize[2] /= numrevs
3103 datasize[2] /= numrevs
3075 fulltotal = fullsize[2]
3104 fulltotal = fullsize[2]
3076 if numfull == 0:
3105 if numfull == 0:
3077 fullsize[2] = 0
3106 fullsize[2] = 0
3078 else:
3107 else:
3079 fullsize[2] /= numfull
3108 fullsize[2] /= numfull
3080 semitotal = semisize[2]
3109 semitotal = semisize[2]
3081 snaptotal = {}
3110 snaptotal = {}
3082 if numsemi > 0:
3111 if numsemi > 0:
3083 semisize[2] /= numsemi
3112 semisize[2] /= numsemi
3084 for depth in snapsizedepth:
3113 for depth in snapsizedepth:
3085 snaptotal[depth] = snapsizedepth[depth][2]
3114 snaptotal[depth] = snapsizedepth[depth][2]
3086 snapsizedepth[depth][2] /= numsnapdepth[depth]
3115 snapsizedepth[depth][2] /= numsnapdepth[depth]
3087
3116
3088 deltatotal = deltasize[2]
3117 deltatotal = deltasize[2]
3089 if numdeltas > 0:
3118 if numdeltas > 0:
3090 deltasize[2] /= numdeltas
3119 deltasize[2] /= numdeltas
3091 totalsize = fulltotal + semitotal + deltatotal
3120 totalsize = fulltotal + semitotal + deltatotal
3092 avgchainlen = sum(chainlengths) / numrevs
3121 avgchainlen = sum(chainlengths) / numrevs
3093 maxchainlen = max(chainlengths)
3122 maxchainlen = max(chainlengths)
3094 maxchainspan = max(chainspans)
3123 maxchainspan = max(chainspans)
3095 compratio = 1
3124 compratio = 1
3096 if totalsize:
3125 if totalsize:
3097 compratio = totalrawsize / totalsize
3126 compratio = totalrawsize / totalsize
3098
3127
3099 basedfmtstr = b'%%%dd\n'
3128 basedfmtstr = b'%%%dd\n'
3100 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3129 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3101
3130
3102 def dfmtstr(max):
3131 def dfmtstr(max):
3103 return basedfmtstr % len(str(max))
3132 return basedfmtstr % len(str(max))
3104
3133
3105 def pcfmtstr(max, padding=0):
3134 def pcfmtstr(max, padding=0):
3106 return basepcfmtstr % (len(str(max)), b' ' * padding)
3135 return basepcfmtstr % (len(str(max)), b' ' * padding)
3107
3136
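# pcfmt() pairs a value with its percentage of `total`, matching the
# "%d (%5.2f%%)"-style format strings built by pcfmtstr()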
3108 def pcfmt(value, total):
3137 def pcfmt(value, total):
3109 if total:
3138 if total:
3110 return (value, 100 * float(value) / total)
3139 return (value, 100 * float(value) / total)
3111 else:
3140 else:
3112 return value, 100.0
3141 return value, 100.0
3113
3142
3114 ui.writenoi18n(b'format : %d\n' % format)
3143 ui.writenoi18n(b'format : %d\n' % format)
3115 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3144 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3116
3145
3117 ui.write(b'\n')
3146 ui.write(b'\n')
3118 fmt = pcfmtstr(totalsize)
3147 fmt = pcfmtstr(totalsize)
3119 fmt2 = dfmtstr(totalsize)
3148 fmt2 = dfmtstr(totalsize)
3120 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3149 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3121 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3150 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3122 ui.writenoi18n(
3151 ui.writenoi18n(
3123 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3152 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3124 )
3153 )
3125 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3154 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3126 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3155 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3127 ui.writenoi18n(
3156 ui.writenoi18n(
3128 b' text : '
3157 b' text : '
3129 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3158 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3130 )
3159 )
3131 ui.writenoi18n(
3160 ui.writenoi18n(
3132 b' delta : '
3161 b' delta : '
3133 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3162 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3134 )
3163 )
3135 ui.writenoi18n(
3164 ui.writenoi18n(
3136 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3165 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3137 )
3166 )
3138 for depth in sorted(numsnapdepth):
3167 for depth in sorted(numsnapdepth):
3139 ui.write(
3168 ui.write(
3140 (b' lvl-%-3d : ' % depth)
3169 (b' lvl-%-3d : ' % depth)
3141 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3170 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3142 )
3171 )
3143 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3172 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3144 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3173 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3145 ui.writenoi18n(
3174 ui.writenoi18n(
3146 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3175 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3147 )
3176 )
3148 for depth in sorted(numsnapdepth):
3177 for depth in sorted(numsnapdepth):
3149 ui.write(
3178 ui.write(
3150 (b' lvl-%-3d : ' % depth)
3179 (b' lvl-%-3d : ' % depth)
3151 + fmt % pcfmt(snaptotal[depth], totalsize)
3180 + fmt % pcfmt(snaptotal[depth], totalsize)
3152 )
3181 )
3153 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3182 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3154
3183
3155 def fmtchunktype(chunktype):
3184 def fmtchunktype(chunktype):
3156 if chunktype == b'empty':
3185 if chunktype == b'empty':
3157 return b' %s : ' % chunktype
3186 return b' %s : ' % chunktype
3158 elif chunktype in pycompat.bytestr(string.ascii_letters):
3187 elif chunktype in pycompat.bytestr(string.ascii_letters):
3159 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3188 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3160 else:
3189 else:
3161 return b' 0x%s : ' % hex(chunktype)
3190 return b' 0x%s : ' % hex(chunktype)
3162
3191
3163 ui.write(b'\n')
3192 ui.write(b'\n')
3164 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3193 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3165 for chunktype in sorted(chunktypecounts):
3194 for chunktype in sorted(chunktypecounts):
3166 ui.write(fmtchunktype(chunktype))
3195 ui.write(fmtchunktype(chunktype))
3167 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3196 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3168 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3197 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3169 for chunktype in sorted(chunktypecounts):
3198 for chunktype in sorted(chunktypecounts):
3170 ui.write(fmtchunktype(chunktype))
3199 ui.write(fmtchunktype(chunktype))
3171 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3200 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3172
3201
3173 ui.write(b'\n')
3202 ui.write(b'\n')
3174 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3203 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3175 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3204 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3176 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3205 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3177 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3206 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3178 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3207 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3179
3208
3180 if format > 0:
3209 if format > 0:
3181 ui.write(b'\n')
3210 ui.write(b'\n')
3182 ui.writenoi18n(
3211 ui.writenoi18n(
3183 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3212 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3184 % tuple(datasize)
3213 % tuple(datasize)
3185 )
3214 )
3186 ui.writenoi18n(
3215 ui.writenoi18n(
3187 b'full revision size (min/max/avg) : %d / %d / %d\n'
3216 b'full revision size (min/max/avg) : %d / %d / %d\n'
3188 % tuple(fullsize)
3217 % tuple(fullsize)
3189 )
3218 )
3190 ui.writenoi18n(
3219 ui.writenoi18n(
3191 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3220 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3192 % tuple(semisize)
3221 % tuple(semisize)
3193 )
3222 )
3194 for depth in sorted(snapsizedepth):
3223 for depth in sorted(snapsizedepth):
3195 if depth == 0:
3224 if depth == 0:
3196 continue
3225 continue
3197 ui.writenoi18n(
3226 ui.writenoi18n(
3198 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3227 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3199 % ((depth,) + tuple(snapsizedepth[depth]))
3228 % ((depth,) + tuple(snapsizedepth[depth]))
3200 )
3229 )
3201 ui.writenoi18n(
3230 ui.writenoi18n(
3202 b'delta size (min/max/avg) : %d / %d / %d\n'
3231 b'delta size (min/max/avg) : %d / %d / %d\n'
3203 % tuple(deltasize)
3232 % tuple(deltasize)
3204 )
3233 )
3205
3234
3206 if numdeltas > 0:
3235 if numdeltas > 0:
3207 ui.write(b'\n')
3236 ui.write(b'\n')
3208 fmt = pcfmtstr(numdeltas)
3237 fmt = pcfmtstr(numdeltas)
3209 fmt2 = pcfmtstr(numdeltas, 4)
3238 fmt2 = pcfmtstr(numdeltas, 4)
3210 ui.writenoi18n(
3239 ui.writenoi18n(
3211 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3240 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3212 )
3241 )
3213 if numprev > 0:
3242 if numprev > 0:
3214 ui.writenoi18n(
3243 ui.writenoi18n(
3215 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3244 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3216 )
3245 )
3217 ui.writenoi18n(
3246 ui.writenoi18n(
3218 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3247 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3219 )
3248 )
3220 ui.writenoi18n(
3249 ui.writenoi18n(
3221 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3250 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3222 )
3251 )
3223 if gdelta:
3252 if gdelta:
3224 ui.writenoi18n(
3253 ui.writenoi18n(
3225 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3254 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3226 )
3255 )
3227 ui.writenoi18n(
3256 ui.writenoi18n(
3228 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3257 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3229 )
3258 )
3230 ui.writenoi18n(
3259 ui.writenoi18n(
3231 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3260 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3232 )
3261 )
3233
3262
3234
3263
3235 @command(
3264 @command(
3236 b'debugrevlogindex',
3265 b'debugrevlogindex',
3237 cmdutil.debugrevlogopts
3266 cmdutil.debugrevlogopts
3238 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3267 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3239 _(b'[-f FORMAT] -c|-m|FILE'),
3268 _(b'[-f FORMAT] -c|-m|FILE'),
3240 optionalrepo=True,
3269 optionalrepo=True,
3241 )
3270 )
3242 def debugrevlogindex(ui, repo, file_=None, **opts):
3271 def debugrevlogindex(ui, repo, file_=None, **opts):
3243 """dump the contents of a revlog index"""
3272 """dump the contents of a revlog index"""
3244 opts = pycompat.byteskwargs(opts)
3273 opts = pycompat.byteskwargs(opts)
3245 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3274 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3246 format = opts.get(b'format', 0)
3275 format = opts.get(b'format', 0)
3247 if format not in (0, 1):
3276 if format not in (0, 1):
3248 raise error.Abort(_(b"unknown format %d") % format)
3277 raise error.Abort(_(b"unknown format %d") % format)
3249
3278
3250 if ui.debugflag:
3279 if ui.debugflag:
3251 shortfn = hex
3280 shortfn = hex
3252 else:
3281 else:
3253 shortfn = short
3282 shortfn = short
3254
3283
3255 # There might not be anything in r, so have a sane default
3284 # There might not be anything in r, so have a sane default
3256 idlen = 12
3285 idlen = 12
3257 for i in r:
3286 for i in r:
3258 idlen = len(shortfn(r.node(i)))
3287 idlen = len(shortfn(r.node(i)))
3259 break
3288 break
3260
3289
3261 if format == 0:
3290 if format == 0:
3262 if ui.verbose:
3291 if ui.verbose:
3263 ui.writenoi18n(
3292 ui.writenoi18n(
3264 b" rev offset length linkrev %s %s p2\n"
3293 b" rev offset length linkrev %s %s p2\n"
3265 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3294 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3266 )
3295 )
3267 else:
3296 else:
3268 ui.writenoi18n(
3297 ui.writenoi18n(
3269 b" rev linkrev %s %s p2\n"
3298 b" rev linkrev %s %s p2\n"
3270 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3299 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3271 )
3300 )
3272 elif format == 1:
3301 elif format == 1:
3273 if ui.verbose:
3302 if ui.verbose:
3274 ui.writenoi18n(
3303 ui.writenoi18n(
3275 (
3304 (
3276 b" rev flag offset length size link p1"
3305 b" rev flag offset length size link p1"
3277 b" p2 %s\n"
3306 b" p2 %s\n"
3278 )
3307 )
3279 % b"nodeid".rjust(idlen)
3308 % b"nodeid".rjust(idlen)
3280 )
3309 )
3281 else:
3310 else:
3282 ui.writenoi18n(
3311 ui.writenoi18n(
3283 b" rev flag size link p1 p2 %s\n"
3312 b" rev flag size link p1 p2 %s\n"
3284 % b"nodeid".rjust(idlen)
3313 % b"nodeid".rjust(idlen)
3285 )
3314 )
3286
3315
3287 for i in r:
3316 for i in r:
3288 node = r.node(i)
3317 node = r.node(i)
3289 if format == 0:
3318 if format == 0:
3290 try:
3319 try:
3291 pp = r.parents(node)
3320 pp = r.parents(node)
3292 except Exception:
3321 except Exception:
3293 pp = [nullid, nullid]
3322 pp = [nullid, nullid]
3294 if ui.verbose:
3323 if ui.verbose:
3295 ui.write(
3324 ui.write(
3296 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3325 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3297 % (
3326 % (
3298 i,
3327 i,
3299 r.start(i),
3328 r.start(i),
3300 r.length(i),
3329 r.length(i),
3301 r.linkrev(i),
3330 r.linkrev(i),
3302 shortfn(node),
3331 shortfn(node),
3303 shortfn(pp[0]),
3332 shortfn(pp[0]),
3304 shortfn(pp[1]),
3333 shortfn(pp[1]),
3305 )
3334 )
3306 )
3335 )
3307 else:
3336 else:
3308 ui.write(
3337 ui.write(
3309 b"% 6d % 7d %s %s %s\n"
3338 b"% 6d % 7d %s %s %s\n"
3310 % (
3339 % (
3311 i,
3340 i,
3312 r.linkrev(i),
3341 r.linkrev(i),
3313 shortfn(node),
3342 shortfn(node),
3314 shortfn(pp[0]),
3343 shortfn(pp[0]),
3315 shortfn(pp[1]),
3344 shortfn(pp[1]),
3316 )
3345 )
3317 )
3346 )
3318 elif format == 1:
3347 elif format == 1:
3319 pr = r.parentrevs(i)
3348 pr = r.parentrevs(i)
3320 if ui.verbose:
3349 if ui.verbose:
3321 ui.write(
3350 ui.write(
3322 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3351 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3323 % (
3352 % (
3324 i,
3353 i,
3325 r.flags(i),
3354 r.flags(i),
3326 r.start(i),
3355 r.start(i),
3327 r.length(i),
3356 r.length(i),
3328 r.rawsize(i),
3357 r.rawsize(i),
3329 r.linkrev(i),
3358 r.linkrev(i),
3330 pr[0],
3359 pr[0],
3331 pr[1],
3360 pr[1],
3332 shortfn(node),
3361 shortfn(node),
3333 )
3362 )
3334 )
3363 )
3335 else:
3364 else:
3336 ui.write(
3365 ui.write(
3337 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3366 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3338 % (
3367 % (
3339 i,
3368 i,
3340 r.flags(i),
3369 r.flags(i),
3341 r.rawsize(i),
3370 r.rawsize(i),
3342 r.linkrev(i),
3371 r.linkrev(i),
3343 pr[0],
3372 pr[0],
3344 pr[1],
3373 pr[1],
3345 shortfn(node),
3374 shortfn(node),
3346 )
3375 )
3347 )
3376 )
3348
3377
3349
3378
3350 @command(
3379 @command(
3351 b'debugrevspec',
3380 b'debugrevspec',
3352 [
3381 [
3353 (
3382 (
3354 b'',
3383 b'',
3355 b'optimize',
3384 b'optimize',
3356 None,
3385 None,
3357 _(b'print parsed tree after optimizing (DEPRECATED)'),
3386 _(b'print parsed tree after optimizing (DEPRECATED)'),
3358 ),
3387 ),
3359 (
3388 (
3360 b'',
3389 b'',
3361 b'show-revs',
3390 b'show-revs',
3362 True,
3391 True,
3363 _(b'print list of result revisions (default)'),
3392 _(b'print list of result revisions (default)'),
3364 ),
3393 ),
3365 (
3394 (
3366 b's',
3395 b's',
3367 b'show-set',
3396 b'show-set',
3368 None,
3397 None,
3369 _(b'print internal representation of result set'),
3398 _(b'print internal representation of result set'),
3370 ),
3399 ),
3371 (
3400 (
3372 b'p',
3401 b'p',
3373 b'show-stage',
3402 b'show-stage',
3374 [],
3403 [],
3375 _(b'print parsed tree at the given stage'),
3404 _(b'print parsed tree at the given stage'),
3376 _(b'NAME'),
3405 _(b'NAME'),
3377 ),
3406 ),
3378 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3407 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3379 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3408 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3380 ],
3409 ],
3381 b'REVSPEC',
3410 b'REVSPEC',
3382 )
3411 )
3383 def debugrevspec(ui, repo, expr, **opts):
3412 def debugrevspec(ui, repo, expr, **opts):
3384 """parse and apply a revision specification
3413 """parse and apply a revision specification
3385
3414
3386 Use the -p/--show-stage option to print the parsed tree at the given stages.
3415 Use the -p/--show-stage option to print the parsed tree at the given stages.
3387 Use -p all to print the tree at every stage.
3416 Use -p all to print the tree at every stage.
3388
3417
3389 Use the --no-show-revs option with -s or -p to print only the set
3418 Use the --no-show-revs option with -s or -p to print only the set
3390 representation or the parsed tree, respectively.
3419 representation or the parsed tree, respectively.
3391
3420
3392 Use --verify-optimized to compare the optimized result with the unoptimized
3421 Use --verify-optimized to compare the optimized result with the unoptimized
3393 one. Returns 1 if the optimized result differs.
3422 one. Returns 1 if the optimized result differs.
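
    For example (illustrative invocations; any revset expression can be used)::

        hg debugrevspec -p all 'heads(all())'
        hg debugrevspec --verify-optimized 'branch(default)'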
3394 """
3423 """
3395 opts = pycompat.byteskwargs(opts)
3424 opts = pycompat.byteskwargs(opts)
3396 aliases = ui.configitems(b'revsetalias')
3425 aliases = ui.configitems(b'revsetalias')
3397 stages = [
3426 stages = [
3398 (b'parsed', lambda tree: tree),
3427 (b'parsed', lambda tree: tree),
3399 (
3428 (
3400 b'expanded',
3429 b'expanded',
3401 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3430 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3402 ),
3431 ),
3403 (b'concatenated', revsetlang.foldconcat),
3432 (b'concatenated', revsetlang.foldconcat),
3404 (b'analyzed', revsetlang.analyze),
3433 (b'analyzed', revsetlang.analyze),
3405 (b'optimized', revsetlang.optimize),
3434 (b'optimized', revsetlang.optimize),
3406 ]
3435 ]
3407 if opts[b'no_optimized']:
3436 if opts[b'no_optimized']:
3408 stages = stages[:-1]
3437 stages = stages[:-1]
3409 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3438 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3410 raise error.Abort(
3439 raise error.Abort(
3411 _(b'cannot use --verify-optimized with --no-optimized')
3440 _(b'cannot use --verify-optimized with --no-optimized')
3412 )
3441 )
3413 stagenames = {n for n, f in stages}
3442 stagenames = {n for n, f in stages}
3414
3443
3415 showalways = set()
3444 showalways = set()
3416 showchanged = set()
3445 showchanged = set()
3417 if ui.verbose and not opts[b'show_stage']:
3446 if ui.verbose and not opts[b'show_stage']:
3418 # show parsed tree by --verbose (deprecated)
3447 # show parsed tree by --verbose (deprecated)
3419 showalways.add(b'parsed')
3448 showalways.add(b'parsed')
3420 showchanged.update([b'expanded', b'concatenated'])
3449 showchanged.update([b'expanded', b'concatenated'])
3421 if opts[b'optimize']:
3450 if opts[b'optimize']:
3422 showalways.add(b'optimized')
3451 showalways.add(b'optimized')
3423 if opts[b'show_stage'] and opts[b'optimize']:
3452 if opts[b'show_stage'] and opts[b'optimize']:
3424 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3453 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3425 if opts[b'show_stage'] == [b'all']:
3454 if opts[b'show_stage'] == [b'all']:
3426 showalways.update(stagenames)
3455 showalways.update(stagenames)
3427 else:
3456 else:
3428 for n in opts[b'show_stage']:
3457 for n in opts[b'show_stage']:
3429 if n not in stagenames:
3458 if n not in stagenames:
3430 raise error.Abort(_(b'invalid stage name: %s') % n)
3459 raise error.Abort(_(b'invalid stage name: %s') % n)
3431 showalways.update(opts[b'show_stage'])
3460 showalways.update(opts[b'show_stage'])
3432
3461
3433 treebystage = {}
3462 treebystage = {}
3434 printedtree = None
3463 printedtree = None
3435 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3464 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3436 for n, f in stages:
3465 for n, f in stages:
3437 treebystage[n] = tree = f(tree)
3466 treebystage[n] = tree = f(tree)
3438 if n in showalways or (n in showchanged and tree != printedtree):
3467 if n in showalways or (n in showchanged and tree != printedtree):
3439 if opts[b'show_stage'] or n != b'parsed':
3468 if opts[b'show_stage'] or n != b'parsed':
3440 ui.write(b"* %s:\n" % n)
3469 ui.write(b"* %s:\n" % n)
3441 ui.write(revsetlang.prettyformat(tree), b"\n")
3470 ui.write(revsetlang.prettyformat(tree), b"\n")
3442 printedtree = tree
3471 printedtree = tree
3443
3472
3444 if opts[b'verify_optimized']:
3473 if opts[b'verify_optimized']:
3445 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3474 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3446 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3475 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3447 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3476 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3448 ui.writenoi18n(
3477 ui.writenoi18n(
3449 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3478 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3450 )
3479 )
3451 ui.writenoi18n(
3480 ui.writenoi18n(
3452 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3481 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3453 )
3482 )
3454 arevs = list(arevs)
3483 arevs = list(arevs)
3455 brevs = list(brevs)
3484 brevs = list(brevs)
3456 if arevs == brevs:
3485 if arevs == brevs:
3457 return 0
3486 return 0
3458 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3487 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3459 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3488 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3460 sm = difflib.SequenceMatcher(None, arevs, brevs)
3489 sm = difflib.SequenceMatcher(None, arevs, brevs)
3461 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3490 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3462 if tag in ('delete', 'replace'):
3491 if tag in ('delete', 'replace'):
3463 for c in arevs[alo:ahi]:
3492 for c in arevs[alo:ahi]:
3464 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3493 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3465 if tag in ('insert', 'replace'):
3494 if tag in ('insert', 'replace'):
3466 for c in brevs[blo:bhi]:
3495 for c in brevs[blo:bhi]:
3467 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3496 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3468 if tag == 'equal':
3497 if tag == 'equal':
3469 for c in arevs[alo:ahi]:
3498 for c in arevs[alo:ahi]:
3470 ui.write(b' %d\n' % c)
3499 ui.write(b' %d\n' % c)
3471 return 1
3500 return 1
3472
3501
3473 func = revset.makematcher(tree)
3502 func = revset.makematcher(tree)
3474 revs = func(repo)
3503 revs = func(repo)
3475 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3504 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3476 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3505 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3477 if not opts[b'show_revs']:
3506 if not opts[b'show_revs']:
3478 return
3507 return
3479 for c in revs:
3508 for c in revs:
3480 ui.write(b"%d\n" % c)
3509 ui.write(b"%d\n" % c)
3481
3510
3482
3511
3483 @command(
3512 @command(
3484 b'debugserve',
3513 b'debugserve',
3485 [
3514 [
3486 (
3515 (
3487 b'',
3516 b'',
3488 b'sshstdio',
3517 b'sshstdio',
3489 False,
3518 False,
3490 _(b'run an SSH server bound to process handles'),
3519 _(b'run an SSH server bound to process handles'),
3491 ),
3520 ),
3492 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3521 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3493 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3522 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3494 ],
3523 ],
3495 b'',
3524 b'',
3496 )
3525 )
3497 def debugserve(ui, repo, **opts):
3526 def debugserve(ui, repo, **opts):
3498 """run a server with advanced settings
3527 """run a server with advanced settings
3499
3528
3500 This command is similar to :hg:`serve`. It exists partially as a
3529 This command is similar to :hg:`serve`. It exists partially as a
3501 workaround to the fact that ``hg serve --stdio`` must have specific
3530 workaround to the fact that ``hg serve --stdio`` must have specific
3502 arguments for security reasons.
3531 arguments for security reasons.
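
    A typical invocation (illustrative; the log path is arbitrary) might be::

        hg debugserve --sshstdio --logiofile /tmp/hg-server-io.log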
3503 """
3532 """
3504 opts = pycompat.byteskwargs(opts)
3533 opts = pycompat.byteskwargs(opts)
3505
3534
3506 if not opts[b'sshstdio']:
3535 if not opts[b'sshstdio']:
3507 raise error.Abort(_(b'only --sshstdio is currently supported'))
3536 raise error.Abort(_(b'only --sshstdio is currently supported'))
3508
3537
3509 logfh = None
3538 logfh = None
3510
3539
3511 if opts[b'logiofd'] and opts[b'logiofile']:
3540 if opts[b'logiofd'] and opts[b'logiofile']:
3512 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3541 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3513
3542
3514 if opts[b'logiofd']:
3543 if opts[b'logiofd']:
3515 # Ideally we would be line buffered. But line buffering in binary
3544 # Ideally we would be line buffered. But line buffering in binary
3516 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3545 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3517 # buffering could have performance impacts. But since this isn't
3546 # buffering could have performance impacts. But since this isn't
3518 # performance critical code, it should be fine.
3547 # performance critical code, it should be fine.
3519 try:
3548 try:
3520 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3549 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3521 except OSError as e:
3550 except OSError as e:
3522 if e.errno != errno.ESPIPE:
3551 if e.errno != errno.ESPIPE:
3523 raise
3552 raise
3524 # can't seek a pipe, so `ab` mode fails on py3
3553 # can't seek a pipe, so `ab` mode fails on py3
3525 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3554 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3526 elif opts[b'logiofile']:
3555 elif opts[b'logiofile']:
3527 logfh = open(opts[b'logiofile'], b'ab', 0)
3556 logfh = open(opts[b'logiofile'], b'ab', 0)
3528
3557
3529 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3558 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3530 s.serve_forever()
3559 s.serve_forever()
3531
3560
3532
3561
3533 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3562 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3534 def debugsetparents(ui, repo, rev1, rev2=None):
3563 def debugsetparents(ui, repo, rev1, rev2=None):
3535 """manually set the parents of the current working directory (DANGEROUS)
3564 """manually set the parents of the current working directory (DANGEROUS)
3536
3565
3537 This command is not what you are looking for and should not be used. Using
3566 This command is not what you are looking for and should not be used. Using
3538 this command will most certainly result in slight corruption of the file
3567 this command will most certainly result in slight corruption of the file
3539 level histories within your repository. DO NOT USE THIS COMMAND.
3568 level histories within your repository. DO NOT USE THIS COMMAND.
3540
3569
3541 The command updates the p1 and p2 fields in the dirstate without touching
3570 The command updates the p1 and p2 fields in the dirstate without touching
3542 anything else. This is useful for writing repository conversion tools, but
3571 anything else. This is useful for writing repository conversion tools, but
3543 it should be used with extreme care. For example, neither the working
3572 it should be used with extreme care. For example, neither the working
3544 directory contents nor the file status tracked in the dirstate are updated,
3573 directory contents nor the file status tracked in the dirstate are updated,
3545 so file status may be incorrect after running this command. Only use it if
3574 so file status may be incorrect after running this command. Only use it if
3546 you are one of the few people who deeply understand both conversion tools
3575 you are one of the few people who deeply understand both conversion tools
3547 and file-level histories. If you are reading this help, you are not one of
3576 and file-level histories. If you are reading this help, you are not one of
3548 those people (most of them sailed west from Mithlond anyway).
3577 those people (most of them sailed west from Mithlond anyway).
3549
3578
3550 So one last time DO NOT USE THIS COMMAND.
3579 So one last time DO NOT USE THIS COMMAND.
3551
3580
3552 Returns 0 on success.
3581 Returns 0 on success.
3553 """
3582 """
3554
3583
3555 node1 = scmutil.revsingle(repo, rev1).node()
3584 node1 = scmutil.revsingle(repo, rev1).node()
3556 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3585 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3557
3586
3558 with repo.wlock():
3587 with repo.wlock():
3559 repo.setparents(node1, node2)
3588 repo.setparents(node1, node2)
3560
3589
3561
3590
3562 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3591 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3563 def debugsidedata(ui, repo, file_, rev=None, **opts):
3592 def debugsidedata(ui, repo, file_, rev=None, **opts):
3564 """dump the side data for a cl/manifest/file revision
3593 """dump the side data for a cl/manifest/file revision
3565
3594
3566 Use --verbose to dump the sidedata content."""
3595 Use --verbose to dump the sidedata content."""
3567 opts = pycompat.byteskwargs(opts)
3596 opts = pycompat.byteskwargs(opts)
3568 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3597 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3569 if rev is not None:
3598 if rev is not None:
3570 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3599 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3571 file_, rev = None, file_
3600 file_, rev = None, file_
3572 elif rev is None:
3601 elif rev is None:
3573 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3602 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3574 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3603 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3575 r = getattr(r, '_revlog', r)
3604 r = getattr(r, '_revlog', r)
3576 try:
3605 try:
3577 sidedata = r.sidedata(r.lookup(rev))
3606 sidedata = r.sidedata(r.lookup(rev))
3578 except KeyError:
3607 except KeyError:
3579 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3608 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3580 if sidedata:
3609 if sidedata:
3581 sidedata = list(sidedata.items())
3610 sidedata = list(sidedata.items())
3582 sidedata.sort()
3611 sidedata.sort()
3583 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3612 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3584 for key, value in sidedata:
3613 for key, value in sidedata:
3585 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3614 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3586 if ui.verbose:
3615 if ui.verbose:
3587 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3616 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3588
3617
3589
3618
3590 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3619 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3591 def debugssl(ui, repo, source=None, **opts):
3620 def debugssl(ui, repo, source=None, **opts):
3592 """test a secure connection to a server
3621 """test a secure connection to a server
3593
3622
3594 This builds the certificate chain for the server on Windows, installing the
3623 This builds the certificate chain for the server on Windows, installing the
3595 missing intermediates and trusted root via Windows Update if necessary. It
3624 missing intermediates and trusted root via Windows Update if necessary. It
3596 does nothing on other platforms.
3625 does nothing on other platforms.
3597
3626
3598 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3627 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3599 that server is used. See :hg:`help urls` for more information.
3628 that server is used. See :hg:`help urls` for more information.
3600
3629
3601 If the update succeeds, retry the original operation. Otherwise, the cause
3630 If the update succeeds, retry the original operation. Otherwise, the cause
3602 of the SSL error is likely another issue.
3631 of the SSL error is likely another issue.
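
    For instance (illustrative URL)::

        hg debugssl https://example.com/repo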
3603 """
3632 """
3604 if not pycompat.iswindows:
3633 if not pycompat.iswindows:
3605 raise error.Abort(
3634 raise error.Abort(
3606 _(b'certificate chain building is only possible on Windows')
3635 _(b'certificate chain building is only possible on Windows')
3607 )
3636 )
3608
3637
3609 if not source:
3638 if not source:
3610 if not repo:
3639 if not repo:
3611 raise error.Abort(
3640 raise error.Abort(
3612 _(
3641 _(
3613 b"there is no Mercurial repository here, and no "
3642 b"there is no Mercurial repository here, and no "
3614 b"server specified"
3643 b"server specified"
3615 )
3644 )
3616 )
3645 )
3617 source = b"default"
3646 source = b"default"
3618
3647
3619 source, branches = hg.parseurl(ui.expandpath(source))
3648 source, branches = hg.parseurl(ui.expandpath(source))
3620 url = util.url(source)
3649 url = util.url(source)
3621
3650
3622 defaultport = {b'https': 443, b'ssh': 22}
3651 defaultport = {b'https': 443, b'ssh': 22}
3623 if url.scheme in defaultport:
3652 if url.scheme in defaultport:
3624 try:
3653 try:
3625 addr = (url.host, int(url.port or defaultport[url.scheme]))
3654 addr = (url.host, int(url.port or defaultport[url.scheme]))
3626 except ValueError:
3655 except ValueError:
3627 raise error.Abort(_(b"malformed port number in URL"))
3656 raise error.Abort(_(b"malformed port number in URL"))
3628 else:
3657 else:
3629 raise error.Abort(_(b"only https and ssh connections are supported"))
3658 raise error.Abort(_(b"only https and ssh connections are supported"))
3630
3659
3631 from . import win32
3660 from . import win32
3632
3661
3633 s = ssl.wrap_socket(
3662 s = ssl.wrap_socket(
3634 socket.socket(),
3663 socket.socket(),
3635 ssl_version=ssl.PROTOCOL_TLS,
3664 ssl_version=ssl.PROTOCOL_TLS,
3636 cert_reqs=ssl.CERT_NONE,
3665 cert_reqs=ssl.CERT_NONE,
3637 ca_certs=None,
3666 ca_certs=None,
3638 )
3667 )
3639
3668
3640 try:
3669 try:
3641 s.connect(addr)
3670 s.connect(addr)
3642 cert = s.getpeercert(True)
3671 cert = s.getpeercert(True)
3643
3672
3644 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3673 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3645
3674
3646 complete = win32.checkcertificatechain(cert, build=False)
3675 complete = win32.checkcertificatechain(cert, build=False)
3647
3676
3648 if not complete:
3677 if not complete:
3649 ui.status(_(b'certificate chain is incomplete, updating... '))
3678 ui.status(_(b'certificate chain is incomplete, updating... '))
3650
3679
3651 if not win32.checkcertificatechain(cert):
3680 if not win32.checkcertificatechain(cert):
3652 ui.status(_(b'failed.\n'))
3681 ui.status(_(b'failed.\n'))
3653 else:
3682 else:
3654 ui.status(_(b'done.\n'))
3683 ui.status(_(b'done.\n'))
3655 else:
3684 else:
3656 ui.status(_(b'full certificate chain is available\n'))
3685 ui.status(_(b'full certificate chain is available\n'))
3657 finally:
3686 finally:
3658 s.close()
3687 s.close()
3659
3688
3660
3689
3661 @command(
3690 @command(
3662 b"debugbackupbundle",
3691 b"debugbackupbundle",
3663 [
3692 [
3664 (
3693 (
3665 b"",
3694 b"",
3666 b"recover",
3695 b"recover",
3667 b"",
3696 b"",
3668 b"brings the specified changeset back into the repository",
3697 b"brings the specified changeset back into the repository",
3669 )
3698 )
3670 ]
3699 ]
3671 + cmdutil.logopts,
3700 + cmdutil.logopts,
3672 _(b"hg debugbackupbundle [--recover HASH]"),
3701 _(b"hg debugbackupbundle [--recover HASH]"),
3673 )
3702 )
3674 def debugbackupbundle(ui, repo, *pats, **opts):
3703 def debugbackupbundle(ui, repo, *pats, **opts):
3675 """lists the changesets available in backup bundles
3704 """lists the changesets available in backup bundles
3676
3705
3677 Without any arguments, this command prints a list of the changesets in each
3706 Without any arguments, this command prints a list of the changesets in each
3678 backup bundle.
3707 backup bundle.
3679
3708
3680 --recover takes a changeset hash and unbundles the first bundle that
3709 --recover takes a changeset hash and unbundles the first bundle that
3681 contains that hash, which puts that changeset back in your repository.
3710 contains that hash, which puts that changeset back in your repository.
3682
3711
3683 --verbose will print the entire commit message and the bundle path for that
3712 --verbose will print the entire commit message and the bundle path for that
3684 backup.
3713 backup.
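
    For example (illustrative; the hash is a placeholder)::

        hg debugbackupbundle
        hg debugbackupbundle --recover abcdef012345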
3685 """
3714 """
3686 backups = list(
3715 backups = list(
3687 filter(
3716 filter(
3688 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3717 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3689 )
3718 )
3690 )
3719 )
3691 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3720 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3692
3721
3693 opts = pycompat.byteskwargs(opts)
3722 opts = pycompat.byteskwargs(opts)
3694 opts[b"bundle"] = b""
3723 opts[b"bundle"] = b""
3695 opts[b"force"] = None
3724 opts[b"force"] = None
3696 limit = logcmdutil.getlimit(opts)
3725 limit = logcmdutil.getlimit(opts)
3697
3726
3698 def display(other, chlist, displayer):
3727 def display(other, chlist, displayer):
3699 if opts.get(b"newest_first"):
3728 if opts.get(b"newest_first"):
3700 chlist.reverse()
3729 chlist.reverse()
3701 count = 0
3730 count = 0
3702 for n in chlist:
3731 for n in chlist:
3703 if limit is not None and count >= limit:
3732 if limit is not None and count >= limit:
3704 break
3733 break
3705 parents = [True for p in other.changelog.parents(n) if p != nullid]
3734 parents = [True for p in other.changelog.parents(n) if p != nullid]
3706 if opts.get(b"no_merges") and len(parents) == 2:
3735 if opts.get(b"no_merges") and len(parents) == 2:
3707 continue
3736 continue
3708 count += 1
3737 count += 1
3709 displayer.show(other[n])
3738 displayer.show(other[n])
3710
3739
3711 recovernode = opts.get(b"recover")
3740 recovernode = opts.get(b"recover")
3712 if recovernode:
3741 if recovernode:
3713 if scmutil.isrevsymbol(repo, recovernode):
3742 if scmutil.isrevsymbol(repo, recovernode):
3714 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3743 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3715 return
3744 return
3716 elif backups:
3745 elif backups:
3717 msg = _(
3746 msg = _(
3718 b"Recover changesets using: hg debugbackupbundle --recover "
3747 b"Recover changesets using: hg debugbackupbundle --recover "
3719 b"<changeset hash>\n\nAvailable backup changesets:"
3748 b"<changeset hash>\n\nAvailable backup changesets:"
3720 )
3749 )
3721 ui.status(msg, label=b"status.removed")
3750 ui.status(msg, label=b"status.removed")
3722 else:
3751 else:
3723 ui.status(_(b"no backup changesets found\n"))
3752 ui.status(_(b"no backup changesets found\n"))
3724 return
3753 return
3725
3754
3726 for backup in backups:
3755 for backup in backups:
3727 # Much of this is copied from the hg incoming logic
3756 # Much of this is copied from the hg incoming logic
3728 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3757 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3729 source, branches = hg.parseurl(source, opts.get(b"branch"))
3758 source, branches = hg.parseurl(source, opts.get(b"branch"))
3730 try:
3759 try:
3731 other = hg.peer(repo, opts, source)
3760 other = hg.peer(repo, opts, source)
3732 except error.LookupError as ex:
3761 except error.LookupError as ex:
3733 msg = _(b"\nwarning: unable to open bundle %s") % source
3762 msg = _(b"\nwarning: unable to open bundle %s") % source
3734 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3763 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3735 ui.warn(msg, hint=hint)
3764 ui.warn(msg, hint=hint)
3736 continue
3765 continue
3737 revs, checkout = hg.addbranchrevs(
3766 revs, checkout = hg.addbranchrevs(
3738 repo, other, branches, opts.get(b"rev")
3767 repo, other, branches, opts.get(b"rev")
3739 )
3768 )
3740
3769
3741 if revs:
3770 if revs:
3742 revs = [other.lookup(rev) for rev in revs]
3771 revs = [other.lookup(rev) for rev in revs]
3743
3772
3744 quiet = ui.quiet
3773 quiet = ui.quiet
3745 try:
3774 try:
3746 ui.quiet = True
3775 ui.quiet = True
3747 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3776 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3748 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3777 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3749 )
3778 )
3750 except error.LookupError:
3779 except error.LookupError:
3751 continue
3780 continue
3752 finally:
3781 finally:
3753 ui.quiet = quiet
3782 ui.quiet = quiet
3754
3783
3755 try:
3784 try:
3756 if not chlist:
3785 if not chlist:
3757 continue
3786 continue
3758 if recovernode:
3787 if recovernode:
3759 with repo.lock(), repo.transaction(b"unbundle") as tr:
3788 with repo.lock(), repo.transaction(b"unbundle") as tr:
3760 if scmutil.isrevsymbol(other, recovernode):
3789 if scmutil.isrevsymbol(other, recovernode):
3761 ui.status(_(b"Unbundling %s\n") % (recovernode))
3790 ui.status(_(b"Unbundling %s\n") % (recovernode))
3762 f = hg.openpath(ui, source)
3791 f = hg.openpath(ui, source)
3763 gen = exchange.readbundle(ui, f, source)
3792 gen = exchange.readbundle(ui, f, source)
3764 if isinstance(gen, bundle2.unbundle20):
3793 if isinstance(gen, bundle2.unbundle20):
3765 bundle2.applybundle(
3794 bundle2.applybundle(
3766 repo,
3795 repo,
3767 gen,
3796 gen,
3768 tr,
3797 tr,
3769 source=b"unbundle",
3798 source=b"unbundle",
3770 url=b"bundle:" + source,
3799 url=b"bundle:" + source,
3771 )
3800 )
3772 else:
3801 else:
3773 gen.apply(repo, b"unbundle", b"bundle:" + source)
3802 gen.apply(repo, b"unbundle", b"bundle:" + source)
3774 break
3803 break
3775 else:
3804 else:
3776 backupdate = encoding.strtolocal(
3805 backupdate = encoding.strtolocal(
3777 time.strftime(
3806 time.strftime(
3778 "%a %H:%M, %Y-%m-%d",
3807 "%a %H:%M, %Y-%m-%d",
3779 time.localtime(os.path.getmtime(source)),
3808 time.localtime(os.path.getmtime(source)),
3780 )
3809 )
3781 )
3810 )
3782 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3811 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3783 if ui.verbose:
3812 if ui.verbose:
3784 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3813 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3785 else:
3814 else:
3786 opts[
3815 opts[
3787 b"template"
3816 b"template"
3788 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3817 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3789 displayer = logcmdutil.changesetdisplayer(
3818 displayer = logcmdutil.changesetdisplayer(
3790 ui, other, opts, False
3819 ui, other, opts, False
3791 )
3820 )
3792 display(other, chlist, displayer)
3821 display(other, chlist, displayer)
3793 displayer.close()
3822 displayer.close()
3794 finally:
3823 finally:
3795 cleanupfn()
3824 cleanupfn()
3796
3825
3797
3826
3798 @command(
3827 @command(
3799 b'debugsub',
3828 b'debugsub',
3800 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3829 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3801 _(b'[-r REV] [REV]'),
3830 _(b'[-r REV] [REV]'),
3802 )
3831 )
3803 def debugsub(ui, repo, rev=None):
3832 def debugsub(ui, repo, rev=None):
3804 ctx = scmutil.revsingle(repo, rev, None)
3833 ctx = scmutil.revsingle(repo, rev, None)
3805 for k, v in sorted(ctx.substate.items()):
3834 for k, v in sorted(ctx.substate.items()):
3806 ui.writenoi18n(b'path %s\n' % k)
3835 ui.writenoi18n(b'path %s\n' % k)
3807 ui.writenoi18n(b' source %s\n' % v[0])
3836 ui.writenoi18n(b' source %s\n' % v[0])
3808 ui.writenoi18n(b' revision %s\n' % v[1])
3837 ui.writenoi18n(b' revision %s\n' % v[1])
3809
3838
3810
3839
3811 @command(b'debugshell', optionalrepo=True)
3840 @command(b'debugshell', optionalrepo=True)
3812 def debugshell(ui, repo):
3841 def debugshell(ui, repo):
3813 """run an interactive Python interpreter
3842 """run an interactive Python interpreter
3814
3843
3815 The local namespace is provided with a reference to the ui and
3844 The local namespace is provided with a reference to the ui and
3816 the repo instance (if available).
3845 the repo instance (if available).
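
    For example, inside the interpreter one might inspect the working copy
    parent with ``repo[b'.']`` or read a setting with
    ``ui.config(b'ui', b'username')`` (illustrative uses of the provided names).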
3817 """
3846 """
3818 import code
3847 import code
3819
3848
3820 imported_objects = {
3849 imported_objects = {
3821 'ui': ui,
3850 'ui': ui,
3822 'repo': repo,
3851 'repo': repo,
3823 }
3852 }
3824
3853
3825 code.interact(local=imported_objects)
3854 code.interact(local=imported_objects)
3826
3855
3827
3856
3828 @command(
3857 @command(
3829 b'debugsuccessorssets',
3858 b'debugsuccessorssets',
3830 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3859 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3831 _(b'[REV]'),
3860 _(b'[REV]'),
3832 )
3861 )
3833 def debugsuccessorssets(ui, repo, *revs, **opts):
3862 def debugsuccessorssets(ui, repo, *revs, **opts):
3834 """show set of successors for revision
3863 """show set of successors for revision
3835
3864
3836 A successors set of changeset A is a consistent group of revisions that
3865 A successors set of changeset A is a consistent group of revisions that
3837 succeed A. It contains non-obsolete changesets only unless closests
3866 succeed A. It contains non-obsolete changesets only unless closests
3838 successors set is set.
3867 successors set is set.
3839
3868
3840 In most cases a changeset A has a single successors set containing a single
3869 In most cases a changeset A has a single successors set containing a single
3841 successor (changeset A replaced by A').
3870 successor (changeset A replaced by A').
3842
3871
3843 A changeset that is made obsolete with no successors is called "pruned".
3872 A changeset that is made obsolete with no successors is called "pruned".
3844 Such changesets have no successors sets at all.
3873 Such changesets have no successors sets at all.
3845
3874
3846 A changeset that has been "split" will have a successors set containing
3875 A changeset that has been "split" will have a successors set containing
3847 more than one successor.
3876 more than one successor.
3848
3877
3849 A changeset that has been rewritten in multiple different ways is called
3878 A changeset that has been rewritten in multiple different ways is called
3850 "divergent". Such changesets have multiple successor sets (each of which
3879 "divergent". Such changesets have multiple successor sets (each of which
3851 may also be split, i.e. have multiple successors).
3880 may also be split, i.e. have multiple successors).
3852
3881
3853 Results are displayed as follows::
3882 Results are displayed as follows::
3854
3883
3855 <rev1>
3884 <rev1>
3856 <successors-1A>
3885 <successors-1A>
3857 <rev2>
3886 <rev2>
3858 <successors-2A>
3887 <successors-2A>
3859 <successors-2B1> <successors-2B2> <successors-2B3>
3888 <successors-2B1> <successors-2B2> <successors-2B3>
3860
3889
3861 Here rev2 has two possible (i.e. divergent) successors sets. The first
3890 Here rev2 has two possible (i.e. divergent) successors sets. The first
3862 holds one element, whereas the second holds three (i.e. the changeset has
3891 holds one element, whereas the second holds three (i.e. the changeset has
3863 been split).
3892 been split).
3864 """
3893 """
3865 # passed to successorssets caching computation from one call to another
3894 # passed to successorssets caching computation from one call to another
3866 cache = {}
3895 cache = {}
3867 ctx2str = bytes
3896 ctx2str = bytes
3868 node2str = short
3897 node2str = short
3869 for rev in scmutil.revrange(repo, revs):
3898 for rev in scmutil.revrange(repo, revs):
3870 ctx = repo[rev]
3899 ctx = repo[rev]
3871 ui.write(b'%s\n' % ctx2str(ctx))
3900 ui.write(b'%s\n' % ctx2str(ctx))
3872 for succsset in obsutil.successorssets(
3901 for succsset in obsutil.successorssets(
3873 repo, ctx.node(), closest=opts['closest'], cache=cache
3902 repo, ctx.node(), closest=opts['closest'], cache=cache
3874 ):
3903 ):
3875 if succsset:
3904 if succsset:
3876 ui.write(b' ')
3905 ui.write(b' ')
3877 ui.write(node2str(succsset[0]))
3906 ui.write(node2str(succsset[0]))
3878 for node in succsset[1:]:
3907 for node in succsset[1:]:
3879 ui.write(b' ')
3908 ui.write(b' ')
3880 ui.write(node2str(node))
3909 ui.write(node2str(node))
3881 ui.write(b'\n')
3910 ui.write(b'\n')
3882
3911
3883
3912
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            tagsnodedisplay = b'missing'
        else:
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))


@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()


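# Usage sketch for debugtemplate (assumed invocations; template syntax as
# documented in ``hg help templates``):
#
#   hg debugtemplate '{"hello"|upper}\n'
#   hg debugtemplate -r . -D greeting=world '{rev}:{node|short} {greeting}\n'
#
# The first form renders a generic template; the second applies the template
# as a log template to the working directory parent and makes the keyword
# ``greeting`` available via -D, matching the ``props`` handling above.

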
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)


@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)


@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using
    flags such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlogs but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )


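# Usage sketch for debugupgraderepo (assumed workflow): run once without
# --run to review the report, then perform the upgrade, optionally limited
# to a subset of revlogs:
#
#   hg debugupgraderepo
#   hg debugupgraderepo --run --changelog
#
# ``--backup`` is on by default; ``--no-backup`` discards the old repository
# content instead of keeping it around.

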
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())


@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )


@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in pycompat.iteritems(opts):
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()


def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines


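# Parsing sketch for _parsewirelangblocks(): given input such as
#
#   command listkeys
#       namespace bookmarks
#
# the generator yields (b'command listkeys', [b'    namespace bookmarks']),
# i.e. each unindented action line paired with its indented argument lines
# (leading whitespace preserved; lines indented deeper than the previous one
# are concatenated onto it).

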
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` ``<X>`` bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and a payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
4382
4411
4383 if opts[b'localssh'] and not repo:
4412 if opts[b'localssh'] and not repo:
4384 raise error.Abort(_(b'--localssh requires a repository'))
4413 raise error.Abort(_(b'--localssh requires a repository'))
4385
4414
4386 if opts[b'peer'] and opts[b'peer'] not in (
4415 if opts[b'peer'] and opts[b'peer'] not in (
4387 b'raw',
4416 b'raw',
4388 b'http2',
4417 b'http2',
4389 b'ssh1',
4418 b'ssh1',
4390 b'ssh2',
4419 b'ssh2',
4391 ):
4420 ):
4392 raise error.Abort(
4421 raise error.Abort(
4393 _(b'invalid value for --peer'),
4422 _(b'invalid value for --peer'),
4394 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4423 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4395 )
4424 )
4396
4425
4397 if path and opts[b'localssh']:
4426 if path and opts[b'localssh']:
4398 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4427 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4399
4428
4400 if ui.interactive():
4429 if ui.interactive():
4401 ui.write(_(b'(waiting for commands on stdin)\n'))
4430 ui.write(_(b'(waiting for commands on stdin)\n'))
4402
4431
4403 blocks = list(_parsewirelangblocks(ui.fin))
4432 blocks = list(_parsewirelangblocks(ui.fin))
4404
4433
4405 proc = None
4434 proc = None
4406 stdin = None
4435 stdin = None
4407 stdout = None
4436 stdout = None
4408 stderr = None
4437 stderr = None
4409 opener = None
4438 opener = None
4410
4439
4411 if opts[b'localssh']:
4440 if opts[b'localssh']:
4412 # We start the SSH server in its own process so there is process
4441 # We start the SSH server in its own process so there is process
4413 # separation. This prevents a whole class of potential bugs around
4442 # separation. This prevents a whole class of potential bugs around
4414 # shared state from interfering with server operation.
4443 # shared state from interfering with server operation.
4415 args = procutil.hgcmd() + [
4444 args = procutil.hgcmd() + [
4416 b'-R',
4445 b'-R',
4417 repo.root,
4446 repo.root,
4418 b'debugserve',
4447 b'debugserve',
4419 b'--sshstdio',
4448 b'--sshstdio',
4420 ]
4449 ]
4421 proc = subprocess.Popen(
4450 proc = subprocess.Popen(
4422 pycompat.rapply(procutil.tonativestr, args),
4451 pycompat.rapply(procutil.tonativestr, args),
4423 stdin=subprocess.PIPE,
4452 stdin=subprocess.PIPE,
4424 stdout=subprocess.PIPE,
4453 stdout=subprocess.PIPE,
4425 stderr=subprocess.PIPE,
4454 stderr=subprocess.PIPE,
4426 bufsize=0,
4455 bufsize=0,
4427 )
4456 )
4428
4457
4429 stdin = proc.stdin
4458 stdin = proc.stdin
4430 stdout = proc.stdout
4459 stdout = proc.stdout
4431 stderr = proc.stderr
4460 stderr = proc.stderr
4432
4461
4433 # We turn the pipes into observers so we can log I/O.
4462 # We turn the pipes into observers so we can log I/O.
4434 if ui.verbose or opts[b'peer'] == b'raw':
4463 if ui.verbose or opts[b'peer'] == b'raw':
4435 stdin = util.makeloggingfileobject(
4464 stdin = util.makeloggingfileobject(
4436 ui, proc.stdin, b'i', logdata=True
4465 ui, proc.stdin, b'i', logdata=True
4437 )
4466 )
4438 stdout = util.makeloggingfileobject(
4467 stdout = util.makeloggingfileobject(
4439 ui, proc.stdout, b'o', logdata=True
4468 ui, proc.stdout, b'o', logdata=True
4440 )
4469 )
4441 stderr = util.makeloggingfileobject(
4470 stderr = util.makeloggingfileobject(
4442 ui, proc.stderr, b'e', logdata=True
4471 ui, proc.stderr, b'e', logdata=True
4443 )
4472 )
4444
4473
4445 # --localssh also implies the peer connection settings.
4474 # --localssh also implies the peer connection settings.
4446
4475
4447 url = b'ssh://localserver'
4476 url = b'ssh://localserver'
4448 autoreadstderr = not opts[b'noreadstderr']
4477 autoreadstderr = not opts[b'noreadstderr']
4449
4478
4450 if opts[b'peer'] == b'ssh1':
4479 if opts[b'peer'] == b'ssh1':
4451 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4480 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4452 peer = sshpeer.sshv1peer(
4481 peer = sshpeer.sshv1peer(
4453 ui,
4482 ui,
4454 url,
4483 url,
4455 proc,
4484 proc,
4456 stdin,
4485 stdin,
4457 stdout,
4486 stdout,
4458 stderr,
4487 stderr,
4459 None,
4488 None,
4460 autoreadstderr=autoreadstderr,
4489 autoreadstderr=autoreadstderr,
4461 )
4490 )
4462 elif opts[b'peer'] == b'ssh2':
4491 elif opts[b'peer'] == b'ssh2':
4463 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4492 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4464 peer = sshpeer.sshv2peer(
4493 peer = sshpeer.sshv2peer(
4465 ui,
4494 ui,
4466 url,
4495 url,
4467 proc,
4496 proc,
4468 stdin,
4497 stdin,
4469 stdout,
4498 stdout,
4470 stderr,
4499 stderr,
4471 None,
4500 None,
4472 autoreadstderr=autoreadstderr,
4501 autoreadstderr=autoreadstderr,
4473 )
4502 )
4474 elif opts[b'peer'] == b'raw':
4503 elif opts[b'peer'] == b'raw':
4475 ui.write(_(b'using raw connection to peer\n'))
4504 ui.write(_(b'using raw connection to peer\n'))
4476 peer = None
4505 peer = None
4477 else:
4506 else:
4478 ui.write(_(b'creating ssh peer from handshake results\n'))
4507 ui.write(_(b'creating ssh peer from handshake results\n'))
4479 peer = sshpeer.makepeer(
4508 peer = sshpeer.makepeer(
4480 ui,
4509 ui,
4481 url,
4510 url,
4482 proc,
4511 proc,
4483 stdin,
4512 stdin,
4484 stdout,
4513 stdout,
4485 stderr,
4514 stderr,
4486 autoreadstderr=autoreadstderr,
4515 autoreadstderr=autoreadstderr,
4487 )
4516 )
4488
4517
4489 elif path:
4518 elif path:
4490 # We bypass hg.peer() so we can proxy the sockets.
4519 # We bypass hg.peer() so we can proxy the sockets.
4491 # TODO consider not doing this because we skip
4520 # TODO consider not doing this because we skip
4492 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4521 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4493 u = util.url(path)
4522 u = util.url(path)
4494 if u.scheme != b'http':
4523 if u.scheme != b'http':
4495 raise error.Abort(_(b'only http:// paths are currently supported'))
4524 raise error.Abort(_(b'only http:// paths are currently supported'))
4496
4525
4497 url, authinfo = u.authinfo()
4526 url, authinfo = u.authinfo()
4498 openerargs = {
4527 openerargs = {
4499 'useragent': b'Mercurial debugwireproto',
4528 'useragent': b'Mercurial debugwireproto',
4500 }
4529 }
4501
4530
4502 # Turn pipes/sockets into observers so we can log I/O.
4531 # Turn pipes/sockets into observers so we can log I/O.
4503 if ui.verbose:
4532 if ui.verbose:
4504 openerargs.update(
4533 openerargs.update(
4505 {
4534 {
4506 'loggingfh': ui,
4535 'loggingfh': ui,
4507 'loggingname': b's',
4536 'loggingname': b's',
4508 'loggingopts': {
4537 'loggingopts': {
4509 'logdata': True,
4538 'logdata': True,
4510 'logdataapis': False,
4539 'logdataapis': False,
4511 },
4540 },
4512 }
4541 }
4513 )
4542 )
4514
4543
4515 if ui.debugflag:
4544 if ui.debugflag:
4516 openerargs['loggingopts']['logdataapis'] = True
4545 openerargs['loggingopts']['logdataapis'] = True
4517
4546
4518 # Don't send default headers when in raw mode. This allows us to
4547 # Don't send default headers when in raw mode. This allows us to
4519 # bypass most of the behavior of our URL handling code so we can
4548 # bypass most of the behavior of our URL handling code so we can
4520 # have near complete control over what's sent on the wire.
4549 # have near complete control over what's sent on the wire.
4521 if opts[b'peer'] == b'raw':
4550 if opts[b'peer'] == b'raw':
4522 openerargs['sendaccept'] = False
4551 openerargs['sendaccept'] = False
4523
4552
4524 opener = urlmod.opener(ui, authinfo, **openerargs)
4553 opener = urlmod.opener(ui, authinfo, **openerargs)
4525
4554
4526 if opts[b'peer'] == b'http2':
4555 if opts[b'peer'] == b'http2':
4527 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4556 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4528 # We go through makepeer() because we need an API descriptor for
4557 # We go through makepeer() because we need an API descriptor for
4529 # the peer instance to be useful.
4558 # the peer instance to be useful.
4530 with ui.configoverride(
4559 with ui.configoverride(
4531 {(b'experimental', b'httppeer.advertise-v2'): True}
4560 {(b'experimental', b'httppeer.advertise-v2'): True}
4532 ):
4561 ):
4533 if opts[b'nologhandshake']:
4562 if opts[b'nologhandshake']:
4534 ui.pushbuffer()
4563 ui.pushbuffer()
4535
4564
4536 peer = httppeer.makepeer(ui, path, opener=opener)
4565 peer = httppeer.makepeer(ui, path, opener=opener)
4537
4566
4538 if opts[b'nologhandshake']:
4567 if opts[b'nologhandshake']:
4539 ui.popbuffer()
4568 ui.popbuffer()
4540
4569
4541 if not isinstance(peer, httppeer.httpv2peer):
4570 if not isinstance(peer, httppeer.httpv2peer):
4542 raise error.Abort(
4571 raise error.Abort(
4543 _(
4572 _(
4544 b'could not instantiate HTTP peer for '
4573 b'could not instantiate HTTP peer for '
4545 b'wire protocol version 2'
4574 b'wire protocol version 2'
4546 ),
4575 ),
4547 hint=_(
4576 hint=_(
4548 b'the server may not have the feature '
4577 b'the server may not have the feature '
4549 b'enabled or is not allowing this '
4578 b'enabled or is not allowing this '
4550 b'client version'
4579 b'client version'
4551 ),
4580 ),
4552 )
4581 )
4553
4582
4554 elif opts[b'peer'] == b'raw':
4583 elif opts[b'peer'] == b'raw':
4555 ui.write(_(b'using raw connection to peer\n'))
4584 ui.write(_(b'using raw connection to peer\n'))
4556 peer = None
4585 peer = None
4557 elif opts[b'peer']:
4586 elif opts[b'peer']:
4558 raise error.Abort(
4587 raise error.Abort(
4559 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4588 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4560 )
4589 )
4561 else:
4590 else:
4562 peer = httppeer.makepeer(ui, path, opener=opener)
4591 peer = httppeer.makepeer(ui, path, opener=opener)
4563
4592
4564 # We /could/ populate stdin/stdout with sock.makefile()...
4593 # We /could/ populate stdin/stdout with sock.makefile()...
4565 else:
4594 else:
4566 raise error.Abort(_(b'unsupported connection configuration'))
4595 raise error.Abort(_(b'unsupported connection configuration'))
4567
4596
4568 batchedcommands = None
4597 batchedcommands = None
4569
4598
4570 # Now perform actions based on the parsed wire language instructions.
4599 # Now perform actions based on the parsed wire language instructions.
4571 for action, lines in blocks:
4600 for action, lines in blocks:
4572 if action in (b'raw', b'raw+'):
4601 if action in (b'raw', b'raw+'):
4573 if not stdin:
4602 if not stdin:
4574 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4603 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4575
4604
4576 # Concatenate the data together.
4605 # Concatenate the data together.
4577 data = b''.join(l.lstrip() for l in lines)
4606 data = b''.join(l.lstrip() for l in lines)
4578 data = stringutil.unescapestr(data)
4607 data = stringutil.unescapestr(data)
4579 stdin.write(data)
4608 stdin.write(data)
4580
4609
4581 if action == b'raw+':
4610 if action == b'raw+':
4582 stdin.flush()
4611 stdin.flush()
4583 elif action == b'flush':
4612 elif action == b'flush':
4584 if not stdin:
4613 if not stdin:
4585 raise error.Abort(_(b'cannot call flush on this peer'))
4614 raise error.Abort(_(b'cannot call flush on this peer'))
4586 stdin.flush()
4615 stdin.flush()
4587 elif action.startswith(b'command'):
4616 elif action.startswith(b'command'):
4588 if not peer:
4617 if not peer:
4589 raise error.Abort(
4618 raise error.Abort(
4590 _(
4619 _(
4591 b'cannot send commands unless peer instance '
4620 b'cannot send commands unless peer instance '
4592 b'is available'
4621 b'is available'
4593 )
4622 )
4594 )
4623 )
4595
4624
4596 command = action.split(b' ', 1)[1]
4625 command = action.split(b' ', 1)[1]
4597
4626
4598 args = {}
4627 args = {}
4599 for line in lines:
4628 for line in lines:
4600 # We need to allow empty values.
4629 # We need to allow empty values.
4601 fields = line.lstrip().split(b' ', 1)
4630 fields = line.lstrip().split(b' ', 1)
4602 if len(fields) == 1:
4631 if len(fields) == 1:
4603 key = fields[0]
4632 key = fields[0]
4604 value = b''
4633 value = b''
4605 else:
4634 else:
4606 key, value = fields
4635 key, value = fields
4607
4636
4608 if value.startswith(b'eval:'):
4637 if value.startswith(b'eval:'):
4609 value = stringutil.evalpythonliteral(value[5:])
4638 value = stringutil.evalpythonliteral(value[5:])
4610 else:
4639 else:
4611 value = stringutil.unescapestr(value)
4640 value = stringutil.unescapestr(value)
4612
4641
4613 args[key] = value
4642 args[key] = value
4614
4643
4615 if batchedcommands is not None:
4644 if batchedcommands is not None:
4616 batchedcommands.append((command, args))
4645 batchedcommands.append((command, args))
4617 continue
4646 continue
4618
4647
4619 ui.status(_(b'sending %s command\n') % command)
4648 ui.status(_(b'sending %s command\n') % command)
4620
4649
4621 if b'PUSHFILE' in args:
4650 if b'PUSHFILE' in args:
4622 with open(args[b'PUSHFILE'], 'rb') as fh:
4651 with open(args[b'PUSHFILE'], 'rb') as fh:
4623 del args[b'PUSHFILE']
4652 del args[b'PUSHFILE']
4624 res, output = peer._callpush(
4653 res, output = peer._callpush(
4625 command, fh, **pycompat.strkwargs(args)
4654 command, fh, **pycompat.strkwargs(args)
4626 )
4655 )
4627 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4656 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4628 ui.status(
4657 ui.status(
4629 _(b'remote output: %s\n') % stringutil.escapestr(output)
4658 _(b'remote output: %s\n') % stringutil.escapestr(output)
4630 )
4659 )
4631 else:
4660 else:
4632 with peer.commandexecutor() as e:
4661 with peer.commandexecutor() as e:
4633 res = e.callcommand(command, args).result()
4662 res = e.callcommand(command, args).result()
4634
4663
4635 if isinstance(res, wireprotov2peer.commandresponse):
4664 if isinstance(res, wireprotov2peer.commandresponse):
4636 val = res.objects()
4665 val = res.objects()
4637 ui.status(
4666 ui.status(
4638 _(b'response: %s\n')
4667 _(b'response: %s\n')
4639 % stringutil.pprint(val, bprefix=True, indent=2)
4668 % stringutil.pprint(val, bprefix=True, indent=2)
4640 )
4669 )
4641 else:
4670 else:
4642 ui.status(
4671 ui.status(
4643 _(b'response: %s\n')
4672 _(b'response: %s\n')
4644 % stringutil.pprint(res, bprefix=True, indent=2)
4673 % stringutil.pprint(res, bprefix=True, indent=2)
4645 )
4674 )
4646
4675
4647 elif action == b'batchbegin':
4676 elif action == b'batchbegin':
4648 if batchedcommands is not None:
4677 if batchedcommands is not None:
4649 raise error.Abort(_(b'nested batchbegin not allowed'))
4678 raise error.Abort(_(b'nested batchbegin not allowed'))
4650
4679
4651 batchedcommands = []
4680 batchedcommands = []
4652 elif action == b'batchsubmit':
4681 elif action == b'batchsubmit':
4653 # There is a batching API we could go through. But it would be
4682 # There is a batching API we could go through. But it would be
4654 # difficult to normalize requests into function calls. It is easier
4683 # difficult to normalize requests into function calls. It is easier
4655 # to bypass this layer and normalize to commands + args.
4684 # to bypass this layer and normalize to commands + args.
4656 ui.status(
4685 ui.status(
4657 _(b'sending batch with %d sub-commands\n')
4686 _(b'sending batch with %d sub-commands\n')
4658 % len(batchedcommands)
4687 % len(batchedcommands)
4659 )
4688 )
4660 assert peer is not None
4689 assert peer is not None
4661 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4690 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4662 ui.status(
4691 ui.status(
4663 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4692 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4664 )
4693 )
4665
4694
4666 batchedcommands = None
4695 batchedcommands = None
4667
4696
4668 elif action.startswith(b'httprequest '):
4697 elif action.startswith(b'httprequest '):
4669 if not opener:
4698 if not opener:
4670 raise error.Abort(
4699 raise error.Abort(
4671 _(b'cannot use httprequest without an HTTP peer')
4700 _(b'cannot use httprequest without an HTTP peer')
4672 )
4701 )
4673
4702
4674 request = action.split(b' ', 2)
4703 request = action.split(b' ', 2)
4675 if len(request) != 3:
4704 if len(request) != 3:
4676 raise error.Abort(
4705 raise error.Abort(
4677 _(
4706 _(
4678 b'invalid httprequest: expected format is '
4707 b'invalid httprequest: expected format is '
4679 b'"httprequest <method> <path>'
4708 b'"httprequest <method> <path>'
4680 )
4709 )
4681 )
4710 )
4682
4711
4683 method, httppath = request[1:]
4712 method, httppath = request[1:]
4684 headers = {}
4713 headers = {}
4685 body = None
4714 body = None
4686 frames = []
4715 frames = []
4687 for line in lines:
4716 for line in lines:
4688 line = line.lstrip()
4717 line = line.lstrip()
4689 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4718 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4690 if m:
4719 if m:
4691 # Headers need to use native strings.
4720 # Headers need to use native strings.
4692 key = pycompat.strurl(m.group(1))
4721 key = pycompat.strurl(m.group(1))
4693 value = pycompat.strurl(m.group(2))
4722 value = pycompat.strurl(m.group(2))
4694 headers[key] = value
4723 headers[key] = value
4695 continue
4724 continue
4696
4725
4697 if line.startswith(b'BODYFILE '):
4726 if line.startswith(b'BODYFILE '):
4698 with open(line.split(b' ', 1), b'rb') as fh:
4727 with open(line.split(b' ', 1), b'rb') as fh:
4699 body = fh.read()
4728 body = fh.read()
4700 elif line.startswith(b'frame '):
4729 elif line.startswith(b'frame '):
4701 frame = wireprotoframing.makeframefromhumanstring(
4730 frame = wireprotoframing.makeframefromhumanstring(
4702 line[len(b'frame ') :]
4731 line[len(b'frame ') :]
4703 )
4732 )
4704
4733
4705 frames.append(frame)
4734 frames.append(frame)
4706 else:
4735 else:
4707 raise error.Abort(
4736 raise error.Abort(
4708 _(b'unknown argument to httprequest: %s') % line
4737 _(b'unknown argument to httprequest: %s') % line
4709 )
4738 )
4710
4739
4711 url = path + httppath
4740 url = path + httppath
4712
4741
4713 if frames:
4742 if frames:
4714 body = b''.join(bytes(f) for f in frames)
4743 body = b''.join(bytes(f) for f in frames)
4715
4744
4716 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4745 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4717
4746
4718 # urllib.Request insists on using has_data() as a proxy for
4747 # urllib.Request insists on using has_data() as a proxy for
4719 # determining the request method. Override that to use our
4748 # determining the request method. Override that to use our
4720 # explicitly requested method.
4749 # explicitly requested method.
4721 req.get_method = lambda: pycompat.sysstr(method)
4750 req.get_method = lambda: pycompat.sysstr(method)
4722
4751
4723 try:
4752 try:
4724 res = opener.open(req)
4753 res = opener.open(req)
4725 body = res.read()
4754 body = res.read()
4726 except util.urlerr.urlerror as e:
4755 except util.urlerr.urlerror as e:
4727 # read() method must be called, but only exists in Python 2
4756 # read() method must be called, but only exists in Python 2
4728 getattr(e, 'read', lambda: None)()
4757 getattr(e, 'read', lambda: None)()
4729 continue
4758 continue
4730
4759
4731 ct = res.headers.get('Content-Type')
4760 ct = res.headers.get('Content-Type')
4732 if ct == 'application/mercurial-cbor':
4761 if ct == 'application/mercurial-cbor':
4733 ui.write(
4762 ui.write(
4734 _(b'cbor> %s\n')
4763 _(b'cbor> %s\n')
4735 % stringutil.pprint(
4764 % stringutil.pprint(
4736 cborutil.decodeall(body), bprefix=True, indent=2
4765 cborutil.decodeall(body), bprefix=True, indent=2
4737 )
4766 )
4738 )
4767 )
4739
4768
4740 elif action == b'close':
4769 elif action == b'close':
4741 assert peer is not None
4770 assert peer is not None
4742 peer.close()
4771 peer.close()
4743 elif action == b'readavailable':
4772 elif action == b'readavailable':
4744 if not stdout or not stderr:
4773 if not stdout or not stderr:
4745 raise error.Abort(
4774 raise error.Abort(
4746 _(b'readavailable not available on this peer')
4775 _(b'readavailable not available on this peer')
4747 )
4776 )
4748
4777
4749 stdin.close()
4778 stdin.close()
4750 stdout.read()
4779 stdout.read()
4751 stderr.read()
4780 stderr.read()
4752
4781
4753 elif action == b'readline':
4782 elif action == b'readline':
4754 if not stdout:
4783 if not stdout:
4755 raise error.Abort(_(b'readline not available on this peer'))
4784 raise error.Abort(_(b'readline not available on this peer'))
4756 stdout.readline()
4785 stdout.readline()
4757 elif action == b'ereadline':
4786 elif action == b'ereadline':
4758 if not stderr:
4787 if not stderr:
4759 raise error.Abort(_(b'ereadline not available on this peer'))
4788 raise error.Abort(_(b'ereadline not available on this peer'))
4760 stderr.readline()
4789 stderr.readline()
4761 elif action.startswith(b'read '):
4790 elif action.startswith(b'read '):
4762 count = int(action.split(b' ', 1)[1])
4791 count = int(action.split(b' ', 1)[1])
4763 if not stdout:
4792 if not stdout:
4764 raise error.Abort(_(b'read not available on this peer'))
4793 raise error.Abort(_(b'read not available on this peer'))
4765 stdout.read(count)
4794 stdout.read(count)
4766 elif action.startswith(b'eread '):
4795 elif action.startswith(b'eread '):
4767 count = int(action.split(b' ', 1)[1])
4796 count = int(action.split(b' ', 1)[1])
4768 if not stderr:
4797 if not stderr:
4769 raise error.Abort(_(b'eread not available on this peer'))
4798 raise error.Abort(_(b'eread not available on this peer'))
4770 stderr.read(count)
4799 stderr.read(count)
4771 else:
4800 else:
4772 raise error.Abort(_(b'unknown action: %s') % action)
4801 raise error.Abort(_(b'unknown action: %s') % action)
4773
4802
4774 if batchedcommands is not None:
4803 if batchedcommands is not None:
4775 raise error.Abort(_(b'unclosed "batchbegin" request'))
4804 raise error.Abort(_(b'unclosed "batchbegin" request'))
4776
4805
4777 if peer:
4806 if peer:
4778 peer.close()
4807 peer.close()
4779
4808
4780 if proc:
4809 if proc:
4781 proc.kill()
4810 proc.kill()
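

# Example script for debugwireproto (a sketch using only actions documented
# in the docstring above), e.g. ``hg debugwireproto --localssh < script``
# where ``script`` contains:
#
#   command listkeys
#       namespace bookmarks
#   batchbegin
#   command listkeys
#       namespace phases
#   command listkeys
#       namespace bookmarks
#   batchsubmit
#   close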