##// END OF EJS Templates
debugdiscovery: add missing byte string marker to some help text...
marmoute -
r47504:b6ac6124 default
parent child Browse files
Show More
@@ -1,4779 +1,4779
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullid,
33 nullid,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 wireprotov2peer,
93 wireprotov2peer,
94 )
94 )
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 )
101 )
102
102
103 from .revlogutils import (
103 from .revlogutils import (
104 deltas as deltautil,
104 deltas as deltautil,
105 nodemap,
105 nodemap,
106 sidedata,
106 sidedata,
107 )
107 )
108
108
109 release = lockmod.release
109 release = lockmod.release
110
110
111 table = {}
111 table = {}
112 table.update(strip.command._table)
112 table.update(strip.command._table)
113 command = registrar.command(table)
113 command = registrar.command(table)
114
114
115
115
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given; open it directly, no repo needed.
        indexfile, rev1, rev2 = args
        rlog = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), indexfile
        )
        tonode = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(tonode(rev1), tonode(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
135
135
136
136
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths must be bytes in this codebase (note the b'wb' mode right
    # next to it); the file name was previously a native str, which breaks
    # on Python 3 where bytes and str paths cannot be mixed.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
152
152
153
153
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the path (local file or URL), parse the bundle header, and
    # replay the stream into the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
160
160
161
161
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the
       current node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The DAG must be built into an empty repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count nodes so the progress bar has
    # a meaningful total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # Second pass: actually create a commit per 'n' element.
        # 'at' tracks the id of the last node committed, 'nodeids' maps
        # DAG ids to commit node hashes for backref resolution.
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # 'mf' gets one dedicated line block per rev, so
                    # concurrent edits on different revs merge cleanly.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the parents' copies.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # Very first node: start from the pre-built lines.
                        ml = initialmergedlines
                    # Touch this rev's own line block.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # 'of' is fully rewritten at every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per rev ...
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # ... and on merges, carry over p2's 'nf*' files so
                        # they survive into the merge commit.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents gathered above.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG parent ids to actual commit nodes.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # ':tag' element: record a local tag for node 'id'.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # '@branch' element: switch branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
337
337
338
338
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of the changegroup unbundler ``gen``

    With ``all`` set, print every delta chunk of the changelog, the
    manifest and each filelog (node, parents, changeset, delta base and
    delta length); otherwise print only the changelog node hashes.
    Output lines are prefixed with ``indent`` spaces so this nests under
    bundle2 part listings.

    NOTE: ``gen`` is consumed as it is iterated; the header/deltaiter
    call order below must match the changegroup wire format.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header followed by one line per delta chunk.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # Filelog sections repeat until an empty header dict marks the end.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
378
378
379
379
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    pad = b' ' * indent
    blob = part.read()
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # Unknown marker format: report it instead of crashing.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(blob))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
        fmtr = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fmtr.startitem()
            fmtr.plain(pad)
            cmdutil.showmarker(fmtr, obsutil.marker(None, raw))
        fmtr.end()
402
402
403
403
def _debugphaseheads(ui, data, indent=0):
    """display phase heads decoded from the binary payload 'data'"""
    pad = b' ' * indent
    byphase = phases.binarydecode(data)
    # One output line per head, grouped by phase.
    for phase in phases.allphases:
        for node in byphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(node), phases.phasenames[phase]))
412
412
413
413
def _quasirepr(thing):
    """byte-string repr of 'thing' with deterministic (sorted) dict order"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
420
420
421
421
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # An empty --part-type list means "show every part".
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        # Known part payloads get a detailed, indented dump below the
        # part header (suppressed in quiet mode).
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
444
444
445
445
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundle specification and stop.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 has its own dumper; everything else is a raw changegroup.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
468
468
469
469
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Main (wire protocol v1) capabilities.
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        # bundle2 capabilities, one key per line with its values nested.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # Always release the peer connection, even if listing failed.
        peer.close()
489
489
490
490
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)

    # Either recompute the changed-files information from the changeset,
    # or decode what was stored in the changelog sidedata.
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # Pick the most specific action; checked in priority order.
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # Copy tracing: which parent the file was copied from, if any.
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
540
540
541
541
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    nerrors = 0
    # Forward check: every dirstate entry must agree with the manifests.
    for path in repo.dirstate:
        state = repo.dirstate[path]
        if state in b"nr" and path not in manifest1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n") % (path, state)
            )
            nerrors += 1
        if state in b"a" and path in manifest1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n") % (path, state)
            )
            nerrors += 1
        if state in b"m" and path not in manifest1 and path not in manifest2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (path, state)
            )
            nerrors += 1
    # Reverse check: every file in the first manifest must be tracked.
    for path in manifest1:
        state = repo.dirstate[path]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s") % (path, state)
            )
            nerrors += 1
    if nerrors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
570
570
571
571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584
584
585
585
def _debugdisplaycolor(ui):
    """print every color/effect name known to the active color mode"""
    # Work on a copy so the caller's ui styles are untouched; each color
    # name is labeled with itself so the listing is shown in that color.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui):
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            for prefix in (b'color.', b'terminfo.'):
                if key.startswith(prefix):
                    ui._styles[key] = key[len(prefix):]
                    break
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
602
602
603
603
def _debugdisplaystyle(ui):
    """List configured style labels and the effects each expands to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels to a common column so the effect lists line up
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617
617
618
618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
640
640
641
641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: emit its DAG, labeling any
        # requested revision numbers as "rN"
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                parents = [p for p in rlog.parentrevs(r) if p != -1]
                yield b'n', (r, parents)
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no index file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for name, node in repo.tags().items():
                labels.setdefault(cl.rev(node), []).append(name)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an annotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                parents = [p for p in cl.parentrevs(r) if p != -1]
                yield b'n', (r, parents)
                if tags:
                    for name in labels.get(r) or ():
                        yield b'l', (r, name)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    lines = dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    )
    for line in lines:
        ui.write(line)
        ui.write(b"\n")
711
711
712
712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        # with -c/-m/--dir the positional argument is the revision,
        # not a file
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728
728
729
729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # also report whether the parsed date falls inside RANGE
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
748
748
749
749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """Return (compsize, uncompsize, deltatype, chain, chainsize)."""
        entry = index[rev]
        compsize = entry[1]
        uncompsize = entry[2]
        chainsize = 0

        # classify the delta by which revision it is based on
        if generaldelta:
            if entry[3] == entry[5]:
                deltatype = b'p1'
            elif entry[3] == entry[6]:
                deltatype = b'p2'
            elif entry[3] == rev - 1:
                deltatype = b'prev'
            elif entry[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, a delta is always against the
            # previous revision (or the revision is a full snapshot)
            if entry[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            chainsize += index[iterrev][1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # map each unique chain base to a small sequential identifier
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes between the chain base and the end of this revision
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: sum the block sizes that would
            # actually be fetched for this delta chain
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930
930
931
931
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        # deprecated --nodates flag overrides --dates
        nodates = True

    if opts.get('datesort'):
        # sort by mtime, then by filename
        keyfunc = lambda x: (x[1][3], x[0])
    else:
        keyfunc = None  # plain filename sort
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored mode
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975
975
976
976
977 @command(
977 @command(
978 b'debugdiscovery',
978 b'debugdiscovery',
979 [
979 [
980 (b'', b'old', None, _(b'use old-style discovery')),
980 (b'', b'old', None, _(b'use old-style discovery')),
981 (
981 (
982 b'',
982 b'',
983 b'nonheads',
983 b'nonheads',
984 None,
984 None,
985 _(b'use old-style discovery with non-heads included'),
985 _(b'use old-style discovery with non-heads included'),
986 ),
986 ),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 (
989 (
990 b'',
990 b'',
991 b'local-as-revs',
991 b'local-as-revs',
992 "",
992 b"",
993 'treat local has having these revisions only',
993 b'treat local has having these revisions only',
994 ),
994 ),
995 (
995 (
996 b'',
996 b'',
997 b'remote-as-revs',
997 b'remote-as-revs',
998 "",
998 b"",
999 'use local as remote, with only these these revisions',
999 b'use local as remote, with only these these revisions',
1000 ),
1000 ),
1001 ]
1001 ]
1002 + cmdutil.remoteopts
1002 + cmdutil.remoteopts
1003 + cmdutil.formatteropts,
1003 + cmdutil.formatteropts,
1004 _(b'[--rev REV] [OTHER]'),
1004 _(b'[--rev REV] [OTHER]'),
1005 )
1005 )
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 """runs the changeset discovery protocol in isolation
1007 """runs the changeset discovery protocol in isolation
1008
1008
1009 The local peer can be "replaced" by a subset of the local repository by
1009 The local peer can be "replaced" by a subset of the local repository by
1010 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1010 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1011 be "replaced" by a subset of the local repository using the
1011 be "replaced" by a subset of the local repository using the
1012 `--local-as-revs` flag. This is useful to efficiently debug pathological
1012 `--local-as-revs` flag. This is useful to efficiently debug pathological
1013 discovery situation.
1013 discovery situation.
1014 """
1014 """
1015 opts = pycompat.byteskwargs(opts)
1015 opts = pycompat.byteskwargs(opts)
1016 unfi = repo.unfiltered()
1016 unfi = repo.unfiltered()
1017
1017
1018 # setup potential extra filtering
1018 # setup potential extra filtering
1019 local_revs = opts[b"local_as_revs"]
1019 local_revs = opts[b"local_as_revs"]
1020 remote_revs = opts[b"remote_as_revs"]
1020 remote_revs = opts[b"remote_as_revs"]
1021
1021
1022 # make sure tests are repeatable
1022 # make sure tests are repeatable
1023 random.seed(int(opts[b'seed']))
1023 random.seed(int(opts[b'seed']))
1024
1024
1025 if not remote_revs:
1025 if not remote_revs:
1026
1026
1027 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1027 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1028 remote = hg.peer(repo, opts, remoteurl)
1028 remote = hg.peer(repo, opts, remoteurl)
1029 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1029 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1030 else:
1030 else:
1031 branches = (None, [])
1031 branches = (None, [])
1032 remote_filtered_revs = scmutil.revrange(
1032 remote_filtered_revs = scmutil.revrange(
1033 unfi, [b"not (::(%s))" % remote_revs]
1033 unfi, [b"not (::(%s))" % remote_revs]
1034 )
1034 )
1035 remote_filtered_revs = frozenset(remote_filtered_revs)
1035 remote_filtered_revs = frozenset(remote_filtered_revs)
1036
1036
1037 def remote_func(x):
1037 def remote_func(x):
1038 return remote_filtered_revs
1038 return remote_filtered_revs
1039
1039
1040 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1040 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1041
1041
1042 remote = repo.peer()
1042 remote = repo.peer()
1043 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1043 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1044
1044
1045 if local_revs:
1045 if local_revs:
1046 local_filtered_revs = scmutil.revrange(
1046 local_filtered_revs = scmutil.revrange(
1047 unfi, [b"not (::(%s))" % local_revs]
1047 unfi, [b"not (::(%s))" % local_revs]
1048 )
1048 )
1049 local_filtered_revs = frozenset(local_filtered_revs)
1049 local_filtered_revs = frozenset(local_filtered_revs)
1050
1050
1051 def local_func(x):
1051 def local_func(x):
1052 return local_filtered_revs
1052 return local_filtered_revs
1053
1053
1054 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1054 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1055 repo = repo.filtered(b'debug-discovery-local-filter')
1055 repo = repo.filtered(b'debug-discovery-local-filter')
1056
1056
1057 data = {}
1057 data = {}
1058 if opts.get(b'old'):
1058 if opts.get(b'old'):
1059
1059
1060 def doit(pushedrevs, remoteheads, remote=remote):
1060 def doit(pushedrevs, remoteheads, remote=remote):
1061 if not util.safehasattr(remote, b'branches'):
1061 if not util.safehasattr(remote, b'branches'):
1062 # enable in-client legacy support
1062 # enable in-client legacy support
1063 remote = localrepo.locallegacypeer(remote.local())
1063 remote = localrepo.locallegacypeer(remote.local())
1064 common, _in, hds = treediscovery.findcommonincoming(
1064 common, _in, hds = treediscovery.findcommonincoming(
1065 repo, remote, force=True, audit=data
1065 repo, remote, force=True, audit=data
1066 )
1066 )
1067 common = set(common)
1067 common = set(common)
1068 if not opts.get(b'nonheads'):
1068 if not opts.get(b'nonheads'):
1069 ui.writenoi18n(
1069 ui.writenoi18n(
1070 b"unpruned common: %s\n"
1070 b"unpruned common: %s\n"
1071 % b" ".join(sorted(short(n) for n in common))
1071 % b" ".join(sorted(short(n) for n in common))
1072 )
1072 )
1073
1073
1074 clnode = repo.changelog.node
1074 clnode = repo.changelog.node
1075 common = repo.revs(b'heads(::%ln)', common)
1075 common = repo.revs(b'heads(::%ln)', common)
1076 common = {clnode(r) for r in common}
1076 common = {clnode(r) for r in common}
1077 return common, hds
1077 return common, hds
1078
1078
1079 else:
1079 else:
1080
1080
1081 def doit(pushedrevs, remoteheads, remote=remote):
1081 def doit(pushedrevs, remoteheads, remote=remote):
1082 nodes = None
1082 nodes = None
1083 if pushedrevs:
1083 if pushedrevs:
1084 revs = scmutil.revrange(repo, pushedrevs)
1084 revs = scmutil.revrange(repo, pushedrevs)
1085 nodes = [repo[r].node() for r in revs]
1085 nodes = [repo[r].node() for r in revs]
1086 common, any, hds = setdiscovery.findcommonheads(
1086 common, any, hds = setdiscovery.findcommonheads(
1087 ui, repo, remote, ancestorsof=nodes, audit=data
1087 ui, repo, remote, ancestorsof=nodes, audit=data
1088 )
1088 )
1089 return common, hds
1089 return common, hds
1090
1090
1091 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1091 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1092 localrevs = opts[b'rev']
1092 localrevs = opts[b'rev']
1093
1093
1094 fm = ui.formatter(b'debugdiscovery', opts)
1094 fm = ui.formatter(b'debugdiscovery', opts)
1095 if fm.strict_format:
1095 if fm.strict_format:
1096
1096
1097 @contextlib.contextmanager
1097 @contextlib.contextmanager
1098 def may_capture_output():
1098 def may_capture_output():
1099 ui.pushbuffer()
1099 ui.pushbuffer()
1100 yield
1100 yield
1101 data[b'output'] = ui.popbuffer()
1101 data[b'output'] = ui.popbuffer()
1102
1102
1103 else:
1103 else:
1104 may_capture_output = util.nullcontextmanager
1104 may_capture_output = util.nullcontextmanager
1105 with may_capture_output():
1105 with may_capture_output():
1106 with util.timedcm('debug-discovery') as t:
1106 with util.timedcm('debug-discovery') as t:
1107 common, hds = doit(localrevs, remoterevs)
1107 common, hds = doit(localrevs, remoterevs)
1108
1108
1109 # compute all statistics
1109 # compute all statistics
1110 heads_common = set(common)
1110 heads_common = set(common)
1111 heads_remote = set(hds)
1111 heads_remote = set(hds)
1112 heads_local = set(repo.heads())
1112 heads_local = set(repo.heads())
1113 # note: they cannot be a local or remote head that is in common and not
1113 # note: they cannot be a local or remote head that is in common and not
1114 # itself a head of common.
1114 # itself a head of common.
1115 heads_common_local = heads_common & heads_local
1115 heads_common_local = heads_common & heads_local
1116 heads_common_remote = heads_common & heads_remote
1116 heads_common_remote = heads_common & heads_remote
1117 heads_common_both = heads_common & heads_remote & heads_local
1117 heads_common_both = heads_common & heads_remote & heads_local
1118
1118
1119 all = repo.revs(b'all()')
1119 all = repo.revs(b'all()')
1120 common = repo.revs(b'::%ln', common)
1120 common = repo.revs(b'::%ln', common)
1121 roots_common = repo.revs(b'roots(::%ld)', common)
1121 roots_common = repo.revs(b'roots(::%ld)', common)
1122 missing = repo.revs(b'not ::%ld', common)
1122 missing = repo.revs(b'not ::%ld', common)
1123 heads_missing = repo.revs(b'heads(%ld)', missing)
1123 heads_missing = repo.revs(b'heads(%ld)', missing)
1124 roots_missing = repo.revs(b'roots(%ld)', missing)
1124 roots_missing = repo.revs(b'roots(%ld)', missing)
1125 assert len(common) + len(missing) == len(all)
1125 assert len(common) + len(missing) == len(all)
1126
1126
1127 initial_undecided = repo.revs(
1127 initial_undecided = repo.revs(
1128 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1128 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1129 )
1129 )
1130 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1130 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1131 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1131 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1132 common_initial_undecided = initial_undecided & common
1132 common_initial_undecided = initial_undecided & common
1133 missing_initial_undecided = initial_undecided & missing
1133 missing_initial_undecided = initial_undecided & missing
1134
1134
1135 data[b'elapsed'] = t.elapsed
1135 data[b'elapsed'] = t.elapsed
1136 data[b'nb-common-heads'] = len(heads_common)
1136 data[b'nb-common-heads'] = len(heads_common)
1137 data[b'nb-common-heads-local'] = len(heads_common_local)
1137 data[b'nb-common-heads-local'] = len(heads_common_local)
1138 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1138 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1139 data[b'nb-common-heads-both'] = len(heads_common_both)
1139 data[b'nb-common-heads-both'] = len(heads_common_both)
1140 data[b'nb-common-roots'] = len(roots_common)
1140 data[b'nb-common-roots'] = len(roots_common)
1141 data[b'nb-head-local'] = len(heads_local)
1141 data[b'nb-head-local'] = len(heads_local)
1142 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1142 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1143 data[b'nb-head-remote'] = len(heads_remote)
1143 data[b'nb-head-remote'] = len(heads_remote)
1144 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1144 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1145 heads_common_remote
1145 heads_common_remote
1146 )
1146 )
1147 data[b'nb-revs'] = len(all)
1147 data[b'nb-revs'] = len(all)
1148 data[b'nb-revs-common'] = len(common)
1148 data[b'nb-revs-common'] = len(common)
1149 data[b'nb-revs-missing'] = len(missing)
1149 data[b'nb-revs-missing'] = len(missing)
1150 data[b'nb-missing-heads'] = len(heads_missing)
1150 data[b'nb-missing-heads'] = len(heads_missing)
1151 data[b'nb-missing-roots'] = len(roots_missing)
1151 data[b'nb-missing-roots'] = len(roots_missing)
1152 data[b'nb-ini_und'] = len(initial_undecided)
1152 data[b'nb-ini_und'] = len(initial_undecided)
1153 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1153 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1154 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1154 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1155 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1155 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1156 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1156 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1157
1157
1158 fm.startitem()
1158 fm.startitem()
1159 fm.data(**pycompat.strkwargs(data))
1159 fm.data(**pycompat.strkwargs(data))
1160 # display discovery summary
1160 # display discovery summary
1161 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1161 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1162 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1162 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1163 fm.plain(b"heads summary:\n")
1163 fm.plain(b"heads summary:\n")
1164 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1164 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1165 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1165 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1166 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1166 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1167 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1167 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1168 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1168 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1169 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1169 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1170 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1170 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1171 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1171 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1172 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1172 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1173 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1173 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1174 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1174 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1175 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1175 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1176 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1176 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1177 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1177 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1178 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1178 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1179 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1179 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1180 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1180 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1181 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1181 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1182 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1182 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1183 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1183 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1184 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1184 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1185 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1185 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1186
1186
1187 if ui.verbose:
1187 if ui.verbose:
1188 fm.plain(
1188 fm.plain(
1189 b"common heads: %s\n"
1189 b"common heads: %s\n"
1190 % b" ".join(sorted(short(n) for n in heads_common))
1190 % b" ".join(sorted(short(n) for n in heads_common))
1191 )
1191 )
1192 fm.end()
1192 fm.end()
1193
1193
1194
1194
1195 _chunksize = 4 << 10
1195 _chunksize = 4 << 10
1196
1196
1197
1197
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # Default to writing on the ui; an explicit --output path gets its own
    # buffered binary file instead.
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in fixed-size chunks so arbitrarily large resources do not
        # have to fit in memory.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Only close what we opened ourselves; the ui is not ours to close.
        if output:
            dest.close()
1220
1220
1221
1221
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # Iterate extensions sorted by name for stable output.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # Frozen (PyOxidizer) builds have no __file__; point at the
            # executable instead.
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1283
1283
1284
1284
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # Transformation pipeline; each stage name can be requested with -p.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file set the matcher will be tested against.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1380
1380
1381
1381
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the longest variant name, but at least the header width.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each variant name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Bytes-like values (have startswith) pass through; booleans
            # become human-readable yes/no for the plain formatter.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Highlight mismatches between repo state, config, and defaults.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1452
1452
1453
1453
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    # Case sensitivity needs a scratch file; tolerate filesystems where we
    # cannot create one and report '(unknown)' instead of aborting.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1476
1476
1477
1477
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # getbundle() is invoked via **kwargs, so these keys are native strings.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    bundletype = opts.get(b'type', b'bzip2').lower()
    # Map user-facing compression names to on-disk bundle format headers.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1524
1524
1525
1525
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Not directly ignored: check whether any parent
                    # directory is, which also ignores the file.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1574
1574
1575
1575
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # Measure the id width from the first node so columns align whether we
    # print full or short hashes; fall back to 12 for an empty store.
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    # NOTE(review): leading spaces in this header were collapsed by the page
    # this chunk was extracted from; restored to align with the %6d/%7d
    # columns below -- confirm against upstream.
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1615
1615
1616
1616
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # Emit one edge per parent; skip the null second parent.
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1635
1635
1636
1636
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # Only the native (C/Rust) index implementation exposes stats().
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1646
1646
1647
1647
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # number of hard failures found; warnings do not count
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no on-disk stdlib directory
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1946
1946
1947
1947
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # conditional expression instead of the fragile `cond and a or b` idiom
    ui.write(b"%s\n" % (b"".join([b"1" if f else b"0" for f in flags])))
1961
1961
1962
1962
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: all work is delegated to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1967
1967
1968
1968
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: blindly delete the lock file(s) and stop
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-*: acquire non-blockingly, hold until the user answers the prompt
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report lock state
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a missing lock file just means the lock is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2080
2080
2081
2081
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache only exists on revlog-backed manifest storage
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # no flags: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2153
2154
2154
2155 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2155 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2156 def debugmergestate(ui, repo, *args, **opts):
2156 def debugmergestate(ui, repo, *args, **opts):
2157 """print merge state
2157 """print merge state
2158
2158
2159 Use --verbose to print out information about whether v1 or v2 merge state
2159 Use --verbose to print out information about whether v1 or v2 merge state
2160 was chosen."""
2160 was chosen."""
2161
2161
2162 if ui.verbose:
2162 if ui.verbose:
2163 ms = mergestatemod.mergestate(repo)
2163 ms = mergestatemod.mergestate(repo)
2164
2164
2165 # sort so that reasonable information is on top
2165 # sort so that reasonable information is on top
2166 v1records = ms._readrecordsv1()
2166 v1records = ms._readrecordsv1()
2167 v2records = ms._readrecordsv2()
2167 v2records = ms._readrecordsv2()
2168
2168
2169 if not v1records and not v2records:
2169 if not v1records and not v2records:
2170 pass
2170 pass
2171 elif not v2records:
2171 elif not v2records:
2172 ui.writenoi18n(b'no version 2 merge state\n')
2172 ui.writenoi18n(b'no version 2 merge state\n')
2173 elif ms._v1v2match(v1records, v2records):
2173 elif ms._v1v2match(v1records, v2records):
2174 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2174 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2175 else:
2175 else:
2176 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2176 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2177
2177
2178 opts = pycompat.byteskwargs(opts)
2178 opts = pycompat.byteskwargs(opts)
2179 if not opts[b'template']:
2179 if not opts[b'template']:
2180 opts[b'template'] = (
2180 opts[b'template'] = (
2181 b'{if(commits, "", "no merge state found\n")}'
2181 b'{if(commits, "", "no merge state found\n")}'
2182 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2182 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2183 b'{files % "file: {path} (state \\"{state}\\")\n'
2183 b'{files % "file: {path} (state \\"{state}\\")\n'
2184 b'{if(local_path, "'
2184 b'{if(local_path, "'
2185 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2185 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2186 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2186 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2187 b' other path: {other_path} (node {other_node})\n'
2187 b' other path: {other_path} (node {other_node})\n'
2188 b'")}'
2188 b'")}'
2189 b'{if(rename_side, "'
2189 b'{if(rename_side, "'
2190 b' rename side: {rename_side}\n'
2190 b' rename side: {rename_side}\n'
2191 b' renamed path: {renamed_path}\n'
2191 b' renamed path: {renamed_path}\n'
2192 b'")}'
2192 b'")}'
2193 b'{extras % " extra: {key} = {value}\n"}'
2193 b'{extras % " extra: {key} = {value}\n"}'
2194 b'"}'
2194 b'"}'
2195 b'{extras % "extra: {file} ({key} = {value})\n"}'
2195 b'{extras % "extra: {file} ({key} = {value})\n"}'
2196 )
2196 )
2197
2197
2198 ms = mergestatemod.mergestate.read(repo)
2198 ms = mergestatemod.mergestate.read(repo)
2199
2199
2200 fm = ui.formatter(b'debugmergestate', opts)
2200 fm = ui.formatter(b'debugmergestate', opts)
2201 fm.startitem()
2201 fm.startitem()
2202
2202
2203 fm_commits = fm.nested(b'commits')
2203 fm_commits = fm.nested(b'commits')
2204 if ms.active():
2204 if ms.active():
2205 for name, node, label_index in (
2205 for name, node, label_index in (
2206 (b'local', ms.local, 0),
2206 (b'local', ms.local, 0),
2207 (b'other', ms.other, 1),
2207 (b'other', ms.other, 1),
2208 ):
2208 ):
2209 fm_commits.startitem()
2209 fm_commits.startitem()
2210 fm_commits.data(name=name)
2210 fm_commits.data(name=name)
2211 fm_commits.data(node=hex(node))
2211 fm_commits.data(node=hex(node))
2212 if ms._labels and len(ms._labels) > label_index:
2212 if ms._labels and len(ms._labels) > label_index:
2213 fm_commits.data(label=ms._labels[label_index])
2213 fm_commits.data(label=ms._labels[label_index])
2214 fm_commits.end()
2214 fm_commits.end()
2215
2215
2216 fm_files = fm.nested(b'files')
2216 fm_files = fm.nested(b'files')
2217 if ms.active():
2217 if ms.active():
2218 for f in ms:
2218 for f in ms:
2219 fm_files.startitem()
2219 fm_files.startitem()
2220 fm_files.data(path=f)
2220 fm_files.data(path=f)
2221 state = ms._state[f]
2221 state = ms._state[f]
2222 fm_files.data(state=state[0])
2222 fm_files.data(state=state[0])
2223 if state[0] in (
2223 if state[0] in (
2224 mergestatemod.MERGE_RECORD_UNRESOLVED,
2224 mergestatemod.MERGE_RECORD_UNRESOLVED,
2225 mergestatemod.MERGE_RECORD_RESOLVED,
2225 mergestatemod.MERGE_RECORD_RESOLVED,
2226 ):
2226 ):
2227 fm_files.data(local_key=state[1])
2227 fm_files.data(local_key=state[1])
2228 fm_files.data(local_path=state[2])
2228 fm_files.data(local_path=state[2])
2229 fm_files.data(ancestor_path=state[3])
2229 fm_files.data(ancestor_path=state[3])
2230 fm_files.data(ancestor_node=state[4])
2230 fm_files.data(ancestor_node=state[4])
2231 fm_files.data(other_path=state[5])
2231 fm_files.data(other_path=state[5])
2232 fm_files.data(other_node=state[6])
2232 fm_files.data(other_node=state[6])
2233 fm_files.data(local_flags=state[7])
2233 fm_files.data(local_flags=state[7])
2234 elif state[0] in (
2234 elif state[0] in (
2235 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2235 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2236 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2236 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2237 ):
2237 ):
2238 fm_files.data(renamed_path=state[1])
2238 fm_files.data(renamed_path=state[1])
2239 fm_files.data(rename_side=state[2])
2239 fm_files.data(rename_side=state[2])
2240 fm_extras = fm_files.nested(b'extras')
2240 fm_extras = fm_files.nested(b'extras')
2241 for k, v in sorted(ms.extras(f).items()):
2241 for k, v in sorted(ms.extras(f).items()):
2242 fm_extras.startitem()
2242 fm_extras.startitem()
2243 fm_extras.data(key=k)
2243 fm_extras.data(key=k)
2244 fm_extras.data(value=v)
2244 fm_extras.data(value=v)
2245 fm_extras.end()
2245 fm_extras.end()
2246
2246
2247 fm_files.end()
2247 fm_files.end()
2248
2248
2249 fm_extras = fm.nested(b'extras')
2249 fm_extras = fm.nested(b'extras')
2250 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2250 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2251 if f in ms:
2251 if f in ms:
2252 # If file is in mergestate, we have already processed it's extras
2252 # If file is in mergestate, we have already processed it's extras
2253 continue
2253 continue
2254 for k, v in pycompat.iteritems(d):
2254 for k, v in pycompat.iteritems(d):
2255 fm_extras.startitem()
2255 fm_extras.startitem()
2256 fm_extras.data(file=f)
2256 fm_extras.data(file=f)
2257 fm_extras.data(key=k)
2257 fm_extras.data(key=k)
2258 fm_extras.data(value=v)
2258 fm_extras.data(value=v)
2259 fm_extras.end()
2259 fm_extras.end()
2260
2260
2261 fm.end()
2261 fm.end()
2262
2262
2263
2263
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    all_names = set()
    # The 'branches' namespace is handled separately below so that only
    # open branches are offered (historically this command listed only
    # open branches).
    for namespace, ns in pycompat.iteritems(repo.names):
        if namespace != b'branches':
            all_names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            all_names.add(tag)

    # Each positional argument is a prefix to complete; no arguments
    # means "complete everything".
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in all_names if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2286
2286
2287
2287
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            # fixed garbled help text: was "the data on disk data"
            _(b'check that the data on disk are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's own serializer (compiled implementations
        # expose nodemap_data_all); fall back to the pure-Python one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): raises ZeroDivisionError if data_length is 0;
            # presumably a persisted docket always has data — confirm.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2349
2349
2350
2350
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # Deletion mode: remove the markers at the given indices.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting `precursor` with the
        # given successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # fixed typo: was "cannot used"
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2500
2500
2501
2501
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Default to the working directory when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copy_map = ctx.p1copies()
    for dst in copy_map:
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2514
2514
2515
2515
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Bug fix: this function was mistakenly also named debugp1copies,
    # shadowing the p1 variant at module level. The command table entry
    # (b'debugp2copies') and p2copies() call show it is the p2 command.
    opts = pycompat.byteskwargs(opts)
    # Default to the working directory when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2528
2528
2529
2529
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Resolve the user-supplied path against the cwd, reject anything
        # outside the repo, then match the repo-relative prefix against
        # dirstate entries whose state letter is in `acceptable`.
        abspath = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if abspath != repo.root and not abspath.startswith(rootdir):
            return [], []
        if os.path.isdir(abspath):
            abspath += b'/'
        prefix = abspath[len(rootdir) :]
        # On platforms where the separator is not '/', dirstate paths
        # use '/' internally and must be translated both ways.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            prefix = prefix.replace(pycompat.ossep, b'/')
        prefixlen = len(prefix)
        wantfull = opts['full']
        matched_files = set()
        matched_dirs = set()
        for f, st in pycompat.iteritems(repo.dirstate):
            if not (f.startswith(prefix) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if wantfull:
                matched_files.add(f)
                continue
            sep = f.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                # Only complete up to the next path segment.
                matched_dirs.add(f[:sep])
            else:
                matched_files.add(f)
        return matched_files, matched_dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files = set()
    dirs = set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2598
2598
2599
2599
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print "src -> dst" sorted by dst.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst in sorted(copy_map):
        ui.write(b'%s -> %s\n' % (copy_map[dst], dst))
2613
2613
2614
2614
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if can_push else _(b'no'))
        )
    finally:
        peer.close()
2638
2638
2639
2639
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    forced_tool = opts[b'tool']
    if forced_tool:
        overrides[(b'ui', b'forcemerge')] = forced_tool
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(forced_tool)))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other tool-selection inputs (verbose mode only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for fpath in ctx.walk(matcher):
            fctx = ctx[fpath]
            # Capture (and discard) tool-selection chatter unless --debug.
            quiet = not ui.debugflag
            try:
                if quiet:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    fpath,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if quiet:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (fpath, tool))
2727
2727
2728
2728
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(
                pycompat.iteritems(target.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: compare-and-set the key via the wire protocol.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        target.close()
2764
2764
2765
2765
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs (parent vectors) of two revisions

    Prints both vectors, their depths, and delta/hamming/distance metrics
    together with the detected relation: ``=`` (equal), ``>``/``<``
    (ordering), ``|`` (incomparable), or ``?`` if no comparison matched.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously ``rel`` was left unbound when none of the pvec
        # comparisons held, which crashed with a NameError in the final
        # write below.  Report an explicit "unknown" relation instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2792
2792
2793
2793
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    target = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this set.
        changedfiles = None
        if opts.get('minimal'):
            in_manifest = set(target.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest has but the dirstate is missing ...
            manifest_only = in_manifest - in_dirstate
            # ... plus dirstate-only files that are not pending adds
            # (state b'a'), which --minimal deliberately leaves alone.
            dirstate_only = in_dirstate - in_manifest
            not_added = {f for f in dirstate_only if dirstate[f] != b'a'}
            changedfiles = manifest_only | not_added

        dirstate.rebuild(target.node(), target.manifest(), changedfiles)
2841
2841
2842
2842
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Delegates entirely to the repair module, which regenerates the
    # fncache from the store contents.
    repair.rebuildfncache(ui, repo)
2847
2847
2848
2848
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Walk every file matched in the requested revision and report its
    # filelog copy metadata, if any.
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        source, sourcenode = renamed
        ui.write(
            _(b"%s renamed from %s:%s\n") % (relpath, source, hex(sourcenode))
        )
2868
2868
2869
2869
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2875
2875
2876
2876
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump mode: one raw table row per revision, then exit.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # cumulative raw (uncompressed) size so far
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # Not a delta: the revision is its own delta base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running head set: parents stop being heads
            # once a child revision appears.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Running ratio of cumulative raw size to r.end(rev);
                # guards against an empty/zero-offset revlog.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode the revlog version/flag word first.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks the ways the "delta" is built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold ``size`` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot (depth 0): starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the base's chain bookkeeping.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify its base (prev/p1/p2/other).
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            # Wrapped revlog (e.g. manifest log): reach the inner revlog.
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # The first stored byte classifies the chunk.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # NOTE(review): the divisions below assume numrevs > 0; an empty
    # revlog would raise ZeroDivisionError here — confirm upstream intent.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Width-parameterized format templates; instantiated per column below.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to the widest expected value.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format, optionally left-padded.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the row label: printable letters get an ASCII hint,
        # everything else is shown as hex only.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # Depth 0 is the "full revision" line already printed.
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3231
3231
3232
3232
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes; otherwise the short 12-char form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one rendered node id to size the columns.
        idlen = len(shortfn(r.node(i)))
        break

    # Header row; layout depends on format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, mirroring the header chosen above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails so the
                # dump can continue past damaged entries.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3346
3346
3347
3347
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline: each stage is a (name, transform)
    # pair applied to the tree produced by the previous stage, in order.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stage names whose tree is always printed, and those printed only when
    # the tree changed relative to the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, recording the tree after every stage so that
    # --verify-optimized can later compare 'analyzed' against 'optimized'.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the unoptimized and the optimized tree and diff
        # the resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal (non-verify) path: evaluate the final tree and print results.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3479
3479
3480
3480
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile both designate an I/O log sink; they are
    # mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Hand control to the SSH wire-protocol server; this blocks until the
    # peer disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3529
3529
3530
3530
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision, producing a single-parent state.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3558
3558
3559
3559
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is selected by the flag, so the sole
    # positional argument is the revision, not a file path.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUGFIX: the error previously named b'debugdata' (copy-paste
            # from the debugdata command); report this command's own name.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    # Pass our own command name so openstorage error messages are accurate.
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap a possible filelog-style wrapper to reach the raw revlog,
    # which is where the sidedata API lives.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort by sidedata key for stable, readable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3586
3586
3587
3587
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    # The chain-building machinery below relies on win32 APIs.
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled here: we only want
    # to fetch the peer certificate, not validate the connection.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes rather than a parsed dict.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only trigger the (slow) Windows
        # Update chain build when the chain is actually incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3657
3657
3658
3658
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision missing locally;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Temporarily silence ui while computing the incoming changesets.
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover mode: unbundle the first backup containing the
                # requested changeset, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print a header with the bundle's mtime,
                # then the changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3794
3794
3795
3795
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepository state (path, source URL, pinned revision)
    # recorded in the given revision, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    substate = ctx.substate
    for path in sorted(substate):
        state = substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3807
3807
3808
3808
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter namespace with the objects a debugging session
    # usually needs; repo may be None when run outside a repository.
    local_ns = {'ui': ui, 'repo': repo}
    code.interact(local=local_ns)
3824
3824
3825
3825
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so computation is reused
    # from one revision to the next.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Non-empty sets are printed indented, one node per column;
            # an empty set (pruned changeset) yields a bare newline.
            if succsset:
                ui.write(b'    ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3880
3880
3881
3881
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what the cache already holds.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            display = hex(tagsnode)
            # Flag cached fnodes that the .hgtags filelog does not know about.
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        elif tagsnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
3900
3900
3901
3901
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """

    def showsymbols(tmplobj):
        # Report which default keywords/functions the template references.
        kwds, funcs = tmplobj.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve the revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and empty keys
    # are rejected.
    props = {}
    for spec in opts['define']:
        try:
            key, value = (part.strip() for part in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        # Only show the expanded tree when aliases actually changed it.
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic-template mode: render once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log-template mode: render per changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3965
3965
3966
3966
3967 @command(
3967 @command(
3968 b'debuguigetpass',
3968 b'debuguigetpass',
3969 [
3969 [
3970 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3970 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3971 ],
3971 ],
3972 _(b'[-p TEXT]'),
3972 _(b'[-p TEXT]'),
3973 norepo=True,
3973 norepo=True,
3974 )
3974 )
3975 def debuguigetpass(ui, prompt=b''):
3975 def debuguigetpass(ui, prompt=b''):
3976 """show prompt to type password"""
3976 """show prompt to type password"""
3977 r = ui.getpass(prompt)
3977 r = ui.getpass(prompt)
3978 if r is None:
3978 if r is None:
3979 r = b"<default response>"
3979 r = b"<default response>"
3980 ui.writenoi18n(b'response: %s\n' % r)
3980 ui.writenoi18n(b'response: %s\n' % r)
3981
3981
3982
3982
3983 @command(
3983 @command(
3984 b'debuguiprompt',
3984 b'debuguiprompt',
3985 [
3985 [
3986 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3986 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3987 ],
3987 ],
3988 _(b'[-p TEXT]'),
3988 _(b'[-p TEXT]'),
3989 norepo=True,
3989 norepo=True,
3990 )
3990 )
3991 def debuguiprompt(ui, prompt=b''):
3991 def debuguiprompt(ui, prompt=b''):
3992 """show plain prompt"""
3992 """show plain prompt"""
3993 r = ui.prompt(prompt)
3993 r = ui.prompt(prompt)
3994 ui.writenoi18n(b'response: %s\n' % r)
3994 ui.writenoi18n(b'response: %s\n' % r)
3995
3995
3996
3996
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while caches are rebuilt.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
4002
4002
4003
4003
4004 @command(
4004 @command(
4005 b'debugupgraderepo',
4005 b'debugupgraderepo',
4006 [
4006 [
4007 (
4007 (
4008 b'o',
4008 b'o',
4009 b'optimize',
4009 b'optimize',
4010 [],
4010 [],
4011 _(b'extra optimization to perform'),
4011 _(b'extra optimization to perform'),
4012 _(b'NAME'),
4012 _(b'NAME'),
4013 ),
4013 ),
4014 (b'', b'run', False, _(b'performs an upgrade')),
4014 (b'', b'run', False, _(b'performs an upgrade')),
4015 (b'', b'backup', True, _(b'keep the old repository content around')),
4015 (b'', b'backup', True, _(b'keep the old repository content around')),
4016 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4016 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4017 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4017 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4018 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4018 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4019 ],
4019 ],
4020 )
4020 )
4021 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4021 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4022 """upgrade a repository to use different features
4022 """upgrade a repository to use different features
4023
4023
4024 If no arguments are specified, the repository is evaluated for upgrade
4024 If no arguments are specified, the repository is evaluated for upgrade
4025 and a list of problems and potential optimizations is printed.
4025 and a list of problems and potential optimizations is printed.
4026
4026
4027 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4027 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4028 can be influenced via additional arguments. More details will be provided
4028 can be influenced via additional arguments. More details will be provided
4029 by the command output when run without ``--run``.
4029 by the command output when run without ``--run``.
4030
4030
4031 During the upgrade, the repository will be locked and no writes will be
4031 During the upgrade, the repository will be locked and no writes will be
4032 allowed.
4032 allowed.
4033
4033
4034 At the end of the upgrade, the repository may not be readable while new
4034 At the end of the upgrade, the repository may not be readable while new
4035 repository data is swapped in. This window will be as long as it takes to
4035 repository data is swapped in. This window will be as long as it takes to
4036 rename some directories inside the ``.hg`` directory. On most machines, this
4036 rename some directories inside the ``.hg`` directory. On most machines, this
4037 should complete almost instantaneously and the chances of a consumer being
4037 should complete almost instantaneously and the chances of a consumer being
4038 unable to access the repository should be low.
4038 unable to access the repository should be low.
4039
4039
4040 By default, all revlog will be upgraded. You can restrict this using flag
4040 By default, all revlog will be upgraded. You can restrict this using flag
4041 such as `--manifest`:
4041 such as `--manifest`:
4042
4042
4043 * `--manifest`: only optimize the manifest
4043 * `--manifest`: only optimize the manifest
4044 * `--no-manifest`: optimize all revlog but the manifest
4044 * `--no-manifest`: optimize all revlog but the manifest
4045 * `--changelog`: optimize the changelog only
4045 * `--changelog`: optimize the changelog only
4046 * `--no-changelog --no-manifest`: optimize filelogs only
4046 * `--no-changelog --no-manifest`: optimize filelogs only
4047 * `--filelogs`: optimize the filelogs only
4047 * `--filelogs`: optimize the filelogs only
4048 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4048 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4049 """
4049 """
4050 return upgrade.upgraderepo(
4050 return upgrade.upgraderepo(
4051 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4051 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4052 )
4052 )
4053
4053
4054
4054
4055 @command(
4055 @command(
4056 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4056 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4057 )
4057 )
4058 def debugwalk(ui, repo, *pats, **opts):
4058 def debugwalk(ui, repo, *pats, **opts):
4059 """show how files match on given patterns"""
4059 """show how files match on given patterns"""
4060 opts = pycompat.byteskwargs(opts)
4060 opts = pycompat.byteskwargs(opts)
4061 m = scmutil.match(repo[None], pats, opts)
4061 m = scmutil.match(repo[None], pats, opts)
4062 if ui.verbose:
4062 if ui.verbose:
4063 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4063 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4064 items = list(repo[None].walk(m))
4064 items = list(repo[None].walk(m))
4065 if not items:
4065 if not items:
4066 return
4066 return
4067 f = lambda fn: fn
4067 f = lambda fn: fn
4068 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4068 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4069 f = lambda fn: util.normpath(fn)
4069 f = lambda fn: util.normpath(fn)
4070 fmt = b'f %%-%ds %%-%ds %%s' % (
4070 fmt = b'f %%-%ds %%-%ds %%s' % (
4071 max([len(abs) for abs in items]),
4071 max([len(abs) for abs in items]),
4072 max([len(repo.pathto(abs)) for abs in items]),
4072 max([len(repo.pathto(abs)) for abs in items]),
4073 )
4073 )
4074 for abs in items:
4074 for abs in items:
4075 line = fmt % (
4075 line = fmt % (
4076 abs,
4076 abs,
4077 f(repo.pathto(abs)),
4077 f(repo.pathto(abs)),
4078 m.exact(abs) and b'exact' or b'',
4078 m.exact(abs) and b'exact' or b'',
4079 )
4079 )
4080 ui.write(b"%s\n" % line.rstrip())
4080 ui.write(b"%s\n" % line.rstrip())
4081
4081
4082
4082
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Divergent nodes, when present, are rendered inline as
        # "<hex> (<phase>)" pairs followed by a trailing space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4100
4100
4101
4101
4102 @command(
4102 @command(
4103 b'debugwireargs',
4103 b'debugwireargs',
4104 [
4104 [
4105 (b'', b'three', b'', b'three'),
4105 (b'', b'three', b'', b'three'),
4106 (b'', b'four', b'', b'four'),
4106 (b'', b'four', b'', b'four'),
4107 (b'', b'five', b'', b'five'),
4107 (b'', b'five', b'', b'five'),
4108 ]
4108 ]
4109 + cmdutil.remoteopts,
4109 + cmdutil.remoteopts,
4110 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4110 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4111 norepo=True,
4111 norepo=True,
4112 )
4112 )
4113 def debugwireargs(ui, repopath, *vals, **opts):
4113 def debugwireargs(ui, repopath, *vals, **opts):
4114 opts = pycompat.byteskwargs(opts)
4114 opts = pycompat.byteskwargs(opts)
4115 repo = hg.peer(ui, opts, repopath)
4115 repo = hg.peer(ui, opts, repopath)
4116 try:
4116 try:
4117 for opt in cmdutil.remoteopts:
4117 for opt in cmdutil.remoteopts:
4118 del opts[opt[1]]
4118 del opts[opt[1]]
4119 args = {}
4119 args = {}
4120 for k, v in pycompat.iteritems(opts):
4120 for k, v in pycompat.iteritems(opts):
4121 if v:
4121 if v:
4122 args[k] = v
4122 args[k] = v
4123 args = pycompat.strkwargs(args)
4123 args = pycompat.strkwargs(args)
4124 # run twice to check that we don't mess up the stream for the next command
4124 # run twice to check that we don't mess up the stream for the next command
4125 res1 = repo.debugwireargs(*vals, **args)
4125 res1 = repo.debugwireargs(*vals, **args)
4126 res2 = repo.debugwireargs(*vals, **args)
4126 res2 = repo.debugwireargs(*vals, **args)
4127 ui.write(b"%s\n" % res1)
4127 ui.write(b"%s\n" % res1)
4128 if res1 != res2:
4128 if res1 != res2:
4129 ui.warn(b"%s\n" % res2)
4129 ui.warn(b"%s\n" % res2)
4130 finally:
4130 finally:
4131 repo.close()
4131 repo.close()
4132
4132
4133
4133
4134 def _parsewirelangblocks(fh):
4134 def _parsewirelangblocks(fh):
4135 activeaction = None
4135 activeaction = None
4136 blocklines = []
4136 blocklines = []
4137 lastindent = 0
4137 lastindent = 0
4138
4138
4139 for line in fh:
4139 for line in fh:
4140 line = line.rstrip()
4140 line = line.rstrip()
4141 if not line:
4141 if not line:
4142 continue
4142 continue
4143
4143
4144 if line.startswith(b'#'):
4144 if line.startswith(b'#'):
4145 continue
4145 continue
4146
4146
4147 if not line.startswith(b' '):
4147 if not line.startswith(b' '):
4148 # New block. Flush previous one.
4148 # New block. Flush previous one.
4149 if activeaction:
4149 if activeaction:
4150 yield activeaction, blocklines
4150 yield activeaction, blocklines
4151
4151
4152 activeaction = line
4152 activeaction = line
4153 blocklines = []
4153 blocklines = []
4154 lastindent = 0
4154 lastindent = 0
4155 continue
4155 continue
4156
4156
4157 # Else we start with an indent.
4157 # Else we start with an indent.
4158
4158
4159 if not activeaction:
4159 if not activeaction:
4160 raise error.Abort(_(b'indented line outside of block'))
4160 raise error.Abort(_(b'indented line outside of block'))
4161
4161
4162 indent = len(line) - len(line.lstrip())
4162 indent = len(line) - len(line.lstrip())
4163
4163
4164 # If this line is indented more than the last line, concatenate it.
4164 # If this line is indented more than the last line, concatenate it.
4165 if indent > lastindent and blocklines:
4165 if indent > lastindent and blocklines:
4166 blocklines[-1] += line.lstrip()
4166 blocklines[-1] += line.lstrip()
4167 else:
4167 else:
4168 blocklines.append(line)
4168 blocklines.append(line)
4169 lastindent = indent
4169 lastindent = indent
4170
4170
4171 # Flush last block.
4171 # Flush last block.
4172 if activeaction:
4172 if activeaction:
4173 yield activeaction, blocklines
4173 yield activeaction, blocklines
4174
4174
4175
4175
4176 @command(
4176 @command(
4177 b'debugwireproto',
4177 b'debugwireproto',
4178 [
4178 [
4179 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4179 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4180 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4180 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4181 (
4181 (
4182 b'',
4182 b'',
4183 b'noreadstderr',
4183 b'noreadstderr',
4184 False,
4184 False,
4185 _(b'do not read from stderr of the remote'),
4185 _(b'do not read from stderr of the remote'),
4186 ),
4186 ),
4187 (
4187 (
4188 b'',
4188 b'',
4189 b'nologhandshake',
4189 b'nologhandshake',
4190 False,
4190 False,
4191 _(b'do not log I/O related to the peer handshake'),
4191 _(b'do not log I/O related to the peer handshake'),
4192 ),
4192 ),
4193 ]
4193 ]
4194 + cmdutil.remoteopts,
4194 + cmdutil.remoteopts,
4195 _(b'[PATH]'),
4195 _(b'[PATH]'),
4196 optionalrepo=True,
4196 optionalrepo=True,
4197 )
4197 )
4198 def debugwireproto(ui, repo, path=None, **opts):
4198 def debugwireproto(ui, repo, path=None, **opts):
4199 """send wire protocol commands to a server
4199 """send wire protocol commands to a server
4200
4200
4201 This command can be used to issue wire protocol commands to remote
4201 This command can be used to issue wire protocol commands to remote
4202 peers and to debug the raw data being exchanged.
4202 peers and to debug the raw data being exchanged.
4203
4203
4204 ``--localssh`` will start an SSH server against the current repository
4204 ``--localssh`` will start an SSH server against the current repository
4205 and connect to that. By default, the connection will perform a handshake
4205 and connect to that. By default, the connection will perform a handshake
4206 and establish an appropriate peer instance.
4206 and establish an appropriate peer instance.
4207
4207
4208 ``--peer`` can be used to bypass the handshake protocol and construct a
4208 ``--peer`` can be used to bypass the handshake protocol and construct a
4209 peer instance using the specified class type. Valid values are ``raw``,
4209 peer instance using the specified class type. Valid values are ``raw``,
4210 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4210 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4211 raw data payloads and don't support higher-level command actions.
4211 raw data payloads and don't support higher-level command actions.
4212
4212
4213 ``--noreadstderr`` can be used to disable automatic reading from stderr
4213 ``--noreadstderr`` can be used to disable automatic reading from stderr
4214 of the peer (for SSH connections only). Disabling automatic reading of
4214 of the peer (for SSH connections only). Disabling automatic reading of
4215 stderr is useful for making output more deterministic.
4215 stderr is useful for making output more deterministic.
4216
4216
4217 Commands are issued via a mini language which is specified via stdin.
4217 Commands are issued via a mini language which is specified via stdin.
4218 The language consists of individual actions to perform. An action is
4218 The language consists of individual actions to perform. An action is
4219 defined by a block. A block is defined as a line with no leading
4219 defined by a block. A block is defined as a line with no leading
4220 space followed by 0 or more lines with leading space. Blocks are
4220 space followed by 0 or more lines with leading space. Blocks are
4221 effectively a high-level command with additional metadata.
4221 effectively a high-level command with additional metadata.
4222
4222
4223 Lines beginning with ``#`` are ignored.
4223 Lines beginning with ``#`` are ignored.
4224
4224
4225 The following sections denote available actions.
4225 The following sections denote available actions.
4226
4226
4227 raw
4227 raw
4228 ---
4228 ---
4229
4229
4230 Send raw data to the server.
4230 Send raw data to the server.
4231
4231
4232 The block payload contains the raw data to send as one atomic send
4232 The block payload contains the raw data to send as one atomic send
4233 operation. The data may not actually be delivered in a single system
4233 operation. The data may not actually be delivered in a single system
4234 call: it depends on the abilities of the transport being used.
4234 call: it depends on the abilities of the transport being used.
4235
4235
4236 Each line in the block is de-indented and concatenated. Then, that
4236 Each line in the block is de-indented and concatenated. Then, that
4237 value is evaluated as a Python b'' literal. This allows the use of
4237 value is evaluated as a Python b'' literal. This allows the use of
4238 backslash escaping, etc.
4238 backslash escaping, etc.
4239
4239
4240 raw+
4240 raw+
4241 ----
4241 ----
4242
4242
4243 Behaves like ``raw`` except flushes output afterwards.
4243 Behaves like ``raw`` except flushes output afterwards.
4244
4244
4245 command <X>
4245 command <X>
4246 -----------
4246 -----------
4247
4247
4248 Send a request to run a named command, whose name follows the ``command``
4248 Send a request to run a named command, whose name follows the ``command``
4249 string.
4249 string.
4250
4250
4251 Arguments to the command are defined as lines in this block. The format of
4251 Arguments to the command are defined as lines in this block. The format of
4252 each line is ``<key> <value>``. e.g.::
4252 each line is ``<key> <value>``. e.g.::
4253
4253
4254 command listkeys
4254 command listkeys
4255 namespace bookmarks
4255 namespace bookmarks
4256
4256
4257 If the value begins with ``eval:``, it will be interpreted as a Python
4257 If the value begins with ``eval:``, it will be interpreted as a Python
4258 literal expression. Otherwise values are interpreted as Python b'' literals.
4258 literal expression. Otherwise values are interpreted as Python b'' literals.
4259 This allows sending complex types and encoding special byte sequences via
4259 This allows sending complex types and encoding special byte sequences via
4260 backslash escaping.
4260 backslash escaping.
4261
4261
4262 The following arguments have special meaning:
4262 The following arguments have special meaning:
4263
4263
4264 ``PUSHFILE``
4264 ``PUSHFILE``
4265 When defined, the *push* mechanism of the peer will be used instead
4265 When defined, the *push* mechanism of the peer will be used instead
4266 of the static request-response mechanism and the content of the
4266 of the static request-response mechanism and the content of the
4267 file specified in the value of this argument will be sent as the
4267 file specified in the value of this argument will be sent as the
4268 command payload.
4268 command payload.
4269
4269
4270 This can be used to submit a local bundle file to the remote.
4270 This can be used to submit a local bundle file to the remote.
4271
4271
4272 batchbegin
4272 batchbegin
4273 ----------
4273 ----------
4274
4274
4275 Instruct the peer to begin a batched send.
4275 Instruct the peer to begin a batched send.
4276
4276
4277 All ``command`` blocks are queued for execution until the next
4277 All ``command`` blocks are queued for execution until the next
4278 ``batchsubmit`` block.
4278 ``batchsubmit`` block.
4279
4279
4280 batchsubmit
4280 batchsubmit
4281 -----------
4281 -----------
4282
4282
4283 Submit previously queued ``command`` blocks as a batch request.
4283 Submit previously queued ``command`` blocks as a batch request.
4284
4284
4285 This action MUST be paired with a ``batchbegin`` action.
4285 This action MUST be paired with a ``batchbegin`` action.
4286
4286
4287 httprequest <method> <path>
4287 httprequest <method> <path>
4288 ---------------------------
4288 ---------------------------
4289
4289
4290 (HTTP peer only)
4290 (HTTP peer only)
4291
4291
4292 Send an HTTP request to the peer.
4292 Send an HTTP request to the peer.
4293
4293
4294 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4294 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4295
4295
4296 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4296 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4297 headers to add to the request. e.g. ``Accept: foo``.
4297 headers to add to the request. e.g. ``Accept: foo``.
4298
4298
4299 The following arguments are special:
4299 The following arguments are special:
4300
4300
4301 ``BODYFILE``
4301 ``BODYFILE``
4302 The content of the file defined as the value to this argument will be
4302 The content of the file defined as the value to this argument will be
4303 transferred verbatim as the HTTP request body.
4303 transferred verbatim as the HTTP request body.
4304
4304
4305 ``frame <type> <flags> <payload>``
4305 ``frame <type> <flags> <payload>``
4306 Send a unified protocol frame as part of the request body.
4306 Send a unified protocol frame as part of the request body.
4307
4307
4308 All frames will be collected and sent as the body to the HTTP
4308 All frames will be collected and sent as the body to the HTTP
4309 request.
4309 request.
4310
4310
4311 close
4311 close
4312 -----
4312 -----
4313
4313
4314 Close the connection to the server.
4314 Close the connection to the server.
4315
4315
4316 flush
4316 flush
4317 -----
4317 -----
4318
4318
4319 Flush data written to the server.
4319 Flush data written to the server.
4320
4320
4321 readavailable
4321 readavailable
4322 -------------
4322 -------------
4323
4323
4324 Close the write end of the connection and read all available data from
4324 Close the write end of the connection and read all available data from
4325 the server.
4325 the server.
4326
4326
4327 If the connection to the server encompasses multiple pipes, we poll both
4327 If the connection to the server encompasses multiple pipes, we poll both
4328 pipes and read available data.
4328 pipes and read available data.
4329
4329
4330 readline
4330 readline
4331 --------
4331 --------
4332
4332
4333 Read a line of output from the server. If there are multiple output
4333 Read a line of output from the server. If there are multiple output
4334 pipes, reads only the main pipe.
4334 pipes, reads only the main pipe.
4335
4335
4336 ereadline
4336 ereadline
4337 ---------
4337 ---------
4338
4338
4339 Like ``readline``, but read from the stderr pipe, if available.
4339 Like ``readline``, but read from the stderr pipe, if available.
4340
4340
4341 read <X>
4341 read <X>
4342 --------
4342 --------
4343
4343
4344 ``read()`` N bytes from the server's main output pipe.
4344 ``read()`` N bytes from the server's main output pipe.
4345
4345
4346 eread <X>
4346 eread <X>
4347 ---------
4347 ---------
4348
4348
4349 ``read()`` N bytes from the server's stderr pipe, if available.
4349 ``read()`` N bytes from the server's stderr pipe, if available.
4350
4350
4351 Specifying Unified Frame-Based Protocol Frames
4351 Specifying Unified Frame-Based Protocol Frames
4352 ----------------------------------------------
4352 ----------------------------------------------
4353
4353
4354 It is possible to emit a *Unified Frame-Based Protocol* by using special
4354 It is possible to emit a *Unified Frame-Based Protocol* by using special
4355 syntax.
4355 syntax.
4356
4356
4357 A frame is composed as a type, flags, and payload. These can be parsed
4357 A frame is composed as a type, flags, and payload. These can be parsed
4358 from a string of the form:
4358 from a string of the form:
4359
4359
4360 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4360 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4361
4361
4362 ``request-id`` and ``stream-id`` are integers defining the request and
4362 ``request-id`` and ``stream-id`` are integers defining the request and
4363 stream identifiers.
4363 stream identifiers.
4364
4364
4365 ``type`` can be an integer value for the frame type or the string name
4365 ``type`` can be an integer value for the frame type or the string name
4366 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4366 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4367 ``command-name``.
4367 ``command-name``.
4368
4368
4369 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4369 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4370 components. Each component (and there can be just one) can be an integer
4370 components. Each component (and there can be just one) can be an integer
4371 or a flag name for stream flags or frame flags, respectively. Values are
4371 or a flag name for stream flags or frame flags, respectively. Values are
4372 resolved to integers and then bitwise OR'd together.
4372 resolved to integers and then bitwise OR'd together.
4373
4373
4374 ``payload`` represents the raw frame payload. If it begins with
4374 ``payload`` represents the raw frame payload. If it begins with
4375 ``cbor:``, the following string is evaluated as Python code and the
4375 ``cbor:``, the following string is evaluated as Python code and the
4376 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4376 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4377 as a Python byte string literal.
4377 as a Python byte string literal.
4378 """
4378 """
4379 opts = pycompat.byteskwargs(opts)
4379 opts = pycompat.byteskwargs(opts)
4380
4380
4381 if opts[b'localssh'] and not repo:
4381 if opts[b'localssh'] and not repo:
4382 raise error.Abort(_(b'--localssh requires a repository'))
4382 raise error.Abort(_(b'--localssh requires a repository'))
4383
4383
4384 if opts[b'peer'] and opts[b'peer'] not in (
4384 if opts[b'peer'] and opts[b'peer'] not in (
4385 b'raw',
4385 b'raw',
4386 b'http2',
4386 b'http2',
4387 b'ssh1',
4387 b'ssh1',
4388 b'ssh2',
4388 b'ssh2',
4389 ):
4389 ):
4390 raise error.Abort(
4390 raise error.Abort(
4391 _(b'invalid value for --peer'),
4391 _(b'invalid value for --peer'),
4392 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4392 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4393 )
4393 )
4394
4394
4395 if path and opts[b'localssh']:
4395 if path and opts[b'localssh']:
4396 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4396 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4397
4397
4398 if ui.interactive():
4398 if ui.interactive():
4399 ui.write(_(b'(waiting for commands on stdin)\n'))
4399 ui.write(_(b'(waiting for commands on stdin)\n'))
4400
4400
4401 blocks = list(_parsewirelangblocks(ui.fin))
4401 blocks = list(_parsewirelangblocks(ui.fin))
4402
4402
4403 proc = None
4403 proc = None
4404 stdin = None
4404 stdin = None
4405 stdout = None
4405 stdout = None
4406 stderr = None
4406 stderr = None
4407 opener = None
4407 opener = None
4408
4408
4409 if opts[b'localssh']:
4409 if opts[b'localssh']:
4410 # We start the SSH server in its own process so there is process
4410 # We start the SSH server in its own process so there is process
4411 # separation. This prevents a whole class of potential bugs around
4411 # separation. This prevents a whole class of potential bugs around
4412 # shared state from interfering with server operation.
4412 # shared state from interfering with server operation.
4413 args = procutil.hgcmd() + [
4413 args = procutil.hgcmd() + [
4414 b'-R',
4414 b'-R',
4415 repo.root,
4415 repo.root,
4416 b'debugserve',
4416 b'debugserve',
4417 b'--sshstdio',
4417 b'--sshstdio',
4418 ]
4418 ]
4419 proc = subprocess.Popen(
4419 proc = subprocess.Popen(
4420 pycompat.rapply(procutil.tonativestr, args),
4420 pycompat.rapply(procutil.tonativestr, args),
4421 stdin=subprocess.PIPE,
4421 stdin=subprocess.PIPE,
4422 stdout=subprocess.PIPE,
4422 stdout=subprocess.PIPE,
4423 stderr=subprocess.PIPE,
4423 stderr=subprocess.PIPE,
4424 bufsize=0,
4424 bufsize=0,
4425 )
4425 )
4426
4426
4427 stdin = proc.stdin
4427 stdin = proc.stdin
4428 stdout = proc.stdout
4428 stdout = proc.stdout
4429 stderr = proc.stderr
4429 stderr = proc.stderr
4430
4430
4431 # We turn the pipes into observers so we can log I/O.
4431 # We turn the pipes into observers so we can log I/O.
4432 if ui.verbose or opts[b'peer'] == b'raw':
4432 if ui.verbose or opts[b'peer'] == b'raw':
4433 stdin = util.makeloggingfileobject(
4433 stdin = util.makeloggingfileobject(
4434 ui, proc.stdin, b'i', logdata=True
4434 ui, proc.stdin, b'i', logdata=True
4435 )
4435 )
4436 stdout = util.makeloggingfileobject(
4436 stdout = util.makeloggingfileobject(
4437 ui, proc.stdout, b'o', logdata=True
4437 ui, proc.stdout, b'o', logdata=True
4438 )
4438 )
4439 stderr = util.makeloggingfileobject(
4439 stderr = util.makeloggingfileobject(
4440 ui, proc.stderr, b'e', logdata=True
4440 ui, proc.stderr, b'e', logdata=True
4441 )
4441 )
4442
4442
4443 # --localssh also implies the peer connection settings.
4443 # --localssh also implies the peer connection settings.
4444
4444
4445 url = b'ssh://localserver'
4445 url = b'ssh://localserver'
4446 autoreadstderr = not opts[b'noreadstderr']
4446 autoreadstderr = not opts[b'noreadstderr']
4447
4447
4448 if opts[b'peer'] == b'ssh1':
4448 if opts[b'peer'] == b'ssh1':
4449 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4449 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4450 peer = sshpeer.sshv1peer(
4450 peer = sshpeer.sshv1peer(
4451 ui,
4451 ui,
4452 url,
4452 url,
4453 proc,
4453 proc,
4454 stdin,
4454 stdin,
4455 stdout,
4455 stdout,
4456 stderr,
4456 stderr,
4457 None,
4457 None,
4458 autoreadstderr=autoreadstderr,
4458 autoreadstderr=autoreadstderr,
4459 )
4459 )
4460 elif opts[b'peer'] == b'ssh2':
4460 elif opts[b'peer'] == b'ssh2':
4461 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4461 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4462 peer = sshpeer.sshv2peer(
4462 peer = sshpeer.sshv2peer(
4463 ui,
4463 ui,
4464 url,
4464 url,
4465 proc,
4465 proc,
4466 stdin,
4466 stdin,
4467 stdout,
4467 stdout,
4468 stderr,
4468 stderr,
4469 None,
4469 None,
4470 autoreadstderr=autoreadstderr,
4470 autoreadstderr=autoreadstderr,
4471 )
4471 )
4472 elif opts[b'peer'] == b'raw':
4472 elif opts[b'peer'] == b'raw':
4473 ui.write(_(b'using raw connection to peer\n'))
4473 ui.write(_(b'using raw connection to peer\n'))
4474 peer = None
4474 peer = None
4475 else:
4475 else:
4476 ui.write(_(b'creating ssh peer from handshake results\n'))
4476 ui.write(_(b'creating ssh peer from handshake results\n'))
4477 peer = sshpeer.makepeer(
4477 peer = sshpeer.makepeer(
4478 ui,
4478 ui,
4479 url,
4479 url,
4480 proc,
4480 proc,
4481 stdin,
4481 stdin,
4482 stdout,
4482 stdout,
4483 stderr,
4483 stderr,
4484 autoreadstderr=autoreadstderr,
4484 autoreadstderr=autoreadstderr,
4485 )
4485 )
4486
4486
4487 elif path:
4487 elif path:
4488 # We bypass hg.peer() so we can proxy the sockets.
4488 # We bypass hg.peer() so we can proxy the sockets.
4489 # TODO consider not doing this because we skip
4489 # TODO consider not doing this because we skip
4490 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4490 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4491 u = util.url(path)
4491 u = util.url(path)
4492 if u.scheme != b'http':
4492 if u.scheme != b'http':
4493 raise error.Abort(_(b'only http:// paths are currently supported'))
4493 raise error.Abort(_(b'only http:// paths are currently supported'))
4494
4494
4495 url, authinfo = u.authinfo()
4495 url, authinfo = u.authinfo()
4496 openerargs = {
4496 openerargs = {
4497 'useragent': b'Mercurial debugwireproto',
4497 'useragent': b'Mercurial debugwireproto',
4498 }
4498 }
4499
4499
4500 # Turn pipes/sockets into observers so we can log I/O.
4500 # Turn pipes/sockets into observers so we can log I/O.
4501 if ui.verbose:
4501 if ui.verbose:
4502 openerargs.update(
4502 openerargs.update(
4503 {
4503 {
4504 'loggingfh': ui,
4504 'loggingfh': ui,
4505 'loggingname': b's',
4505 'loggingname': b's',
4506 'loggingopts': {
4506 'loggingopts': {
4507 'logdata': True,
4507 'logdata': True,
4508 'logdataapis': False,
4508 'logdataapis': False,
4509 },
4509 },
4510 }
4510 }
4511 )
4511 )
4512
4512
4513 if ui.debugflag:
4513 if ui.debugflag:
4514 openerargs['loggingopts']['logdataapis'] = True
4514 openerargs['loggingopts']['logdataapis'] = True
4515
4515
4516 # Don't send default headers when in raw mode. This allows us to
4516 # Don't send default headers when in raw mode. This allows us to
4517 # bypass most of the behavior of our URL handling code so we can
4517 # bypass most of the behavior of our URL handling code so we can
4518 # have near complete control over what's sent on the wire.
4518 # have near complete control over what's sent on the wire.
4519 if opts[b'peer'] == b'raw':
4519 if opts[b'peer'] == b'raw':
4520 openerargs['sendaccept'] = False
4520 openerargs['sendaccept'] = False
4521
4521
4522 opener = urlmod.opener(ui, authinfo, **openerargs)
4522 opener = urlmod.opener(ui, authinfo, **openerargs)
4523
4523
4524 if opts[b'peer'] == b'http2':
4524 if opts[b'peer'] == b'http2':
4525 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4525 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4526 # We go through makepeer() because we need an API descriptor for
4526 # We go through makepeer() because we need an API descriptor for
4527 # the peer instance to be useful.
4527 # the peer instance to be useful.
4528 with ui.configoverride(
4528 with ui.configoverride(
4529 {(b'experimental', b'httppeer.advertise-v2'): True}
4529 {(b'experimental', b'httppeer.advertise-v2'): True}
4530 ):
4530 ):
4531 if opts[b'nologhandshake']:
4531 if opts[b'nologhandshake']:
4532 ui.pushbuffer()
4532 ui.pushbuffer()
4533
4533
4534 peer = httppeer.makepeer(ui, path, opener=opener)
4534 peer = httppeer.makepeer(ui, path, opener=opener)
4535
4535
4536 if opts[b'nologhandshake']:
4536 if opts[b'nologhandshake']:
4537 ui.popbuffer()
4537 ui.popbuffer()
4538
4538
4539 if not isinstance(peer, httppeer.httpv2peer):
4539 if not isinstance(peer, httppeer.httpv2peer):
4540 raise error.Abort(
4540 raise error.Abort(
4541 _(
4541 _(
4542 b'could not instantiate HTTP peer for '
4542 b'could not instantiate HTTP peer for '
4543 b'wire protocol version 2'
4543 b'wire protocol version 2'
4544 ),
4544 ),
4545 hint=_(
4545 hint=_(
4546 b'the server may not have the feature '
4546 b'the server may not have the feature '
4547 b'enabled or is not allowing this '
4547 b'enabled or is not allowing this '
4548 b'client version'
4548 b'client version'
4549 ),
4549 ),
4550 )
4550 )
4551
4551
4552 elif opts[b'peer'] == b'raw':
4552 elif opts[b'peer'] == b'raw':
4553 ui.write(_(b'using raw connection to peer\n'))
4553 ui.write(_(b'using raw connection to peer\n'))
4554 peer = None
4554 peer = None
4555 elif opts[b'peer']:
4555 elif opts[b'peer']:
4556 raise error.Abort(
4556 raise error.Abort(
4557 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4557 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4558 )
4558 )
4559 else:
4559 else:
4560 peer = httppeer.makepeer(ui, path, opener=opener)
4560 peer = httppeer.makepeer(ui, path, opener=opener)
4561
4561
4562 # We /could/ populate stdin/stdout with sock.makefile()...
4562 # We /could/ populate stdin/stdout with sock.makefile()...
4563 else:
4563 else:
4564 raise error.Abort(_(b'unsupported connection configuration'))
4564 raise error.Abort(_(b'unsupported connection configuration'))
4565
4565
4566 batchedcommands = None
4566 batchedcommands = None
4567
4567
4568 # Now perform actions based on the parsed wire language instructions.
4568 # Now perform actions based on the parsed wire language instructions.
4569 for action, lines in blocks:
4569 for action, lines in blocks:
4570 if action in (b'raw', b'raw+'):
4570 if action in (b'raw', b'raw+'):
4571 if not stdin:
4571 if not stdin:
4572 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4572 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4573
4573
4574 # Concatenate the data together.
4574 # Concatenate the data together.
4575 data = b''.join(l.lstrip() for l in lines)
4575 data = b''.join(l.lstrip() for l in lines)
4576 data = stringutil.unescapestr(data)
4576 data = stringutil.unescapestr(data)
4577 stdin.write(data)
4577 stdin.write(data)
4578
4578
4579 if action == b'raw+':
4579 if action == b'raw+':
4580 stdin.flush()
4580 stdin.flush()
4581 elif action == b'flush':
4581 elif action == b'flush':
4582 if not stdin:
4582 if not stdin:
4583 raise error.Abort(_(b'cannot call flush on this peer'))
4583 raise error.Abort(_(b'cannot call flush on this peer'))
4584 stdin.flush()
4584 stdin.flush()
4585 elif action.startswith(b'command'):
4585 elif action.startswith(b'command'):
4586 if not peer:
4586 if not peer:
4587 raise error.Abort(
4587 raise error.Abort(
4588 _(
4588 _(
4589 b'cannot send commands unless peer instance '
4589 b'cannot send commands unless peer instance '
4590 b'is available'
4590 b'is available'
4591 )
4591 )
4592 )
4592 )
4593
4593
4594 command = action.split(b' ', 1)[1]
4594 command = action.split(b' ', 1)[1]
4595
4595
4596 args = {}
4596 args = {}
4597 for line in lines:
4597 for line in lines:
4598 # We need to allow empty values.
4598 # We need to allow empty values.
4599 fields = line.lstrip().split(b' ', 1)
4599 fields = line.lstrip().split(b' ', 1)
4600 if len(fields) == 1:
4600 if len(fields) == 1:
4601 key = fields[0]
4601 key = fields[0]
4602 value = b''
4602 value = b''
4603 else:
4603 else:
4604 key, value = fields
4604 key, value = fields
4605
4605
4606 if value.startswith(b'eval:'):
4606 if value.startswith(b'eval:'):
4607 value = stringutil.evalpythonliteral(value[5:])
4607 value = stringutil.evalpythonliteral(value[5:])
4608 else:
4608 else:
4609 value = stringutil.unescapestr(value)
4609 value = stringutil.unescapestr(value)
4610
4610
4611 args[key] = value
4611 args[key] = value
4612
4612
4613 if batchedcommands is not None:
4613 if batchedcommands is not None:
4614 batchedcommands.append((command, args))
4614 batchedcommands.append((command, args))
4615 continue
4615 continue
4616
4616
4617 ui.status(_(b'sending %s command\n') % command)
4617 ui.status(_(b'sending %s command\n') % command)
4618
4618
4619 if b'PUSHFILE' in args:
4619 if b'PUSHFILE' in args:
4620 with open(args[b'PUSHFILE'], 'rb') as fh:
4620 with open(args[b'PUSHFILE'], 'rb') as fh:
4621 del args[b'PUSHFILE']
4621 del args[b'PUSHFILE']
4622 res, output = peer._callpush(
4622 res, output = peer._callpush(
4623 command, fh, **pycompat.strkwargs(args)
4623 command, fh, **pycompat.strkwargs(args)
4624 )
4624 )
4625 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4625 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4626 ui.status(
4626 ui.status(
4627 _(b'remote output: %s\n') % stringutil.escapestr(output)
4627 _(b'remote output: %s\n') % stringutil.escapestr(output)
4628 )
4628 )
4629 else:
4629 else:
4630 with peer.commandexecutor() as e:
4630 with peer.commandexecutor() as e:
4631 res = e.callcommand(command, args).result()
4631 res = e.callcommand(command, args).result()
4632
4632
4633 if isinstance(res, wireprotov2peer.commandresponse):
4633 if isinstance(res, wireprotov2peer.commandresponse):
4634 val = res.objects()
4634 val = res.objects()
4635 ui.status(
4635 ui.status(
4636 _(b'response: %s\n')
4636 _(b'response: %s\n')
4637 % stringutil.pprint(val, bprefix=True, indent=2)
4637 % stringutil.pprint(val, bprefix=True, indent=2)
4638 )
4638 )
4639 else:
4639 else:
4640 ui.status(
4640 ui.status(
4641 _(b'response: %s\n')
4641 _(b'response: %s\n')
4642 % stringutil.pprint(res, bprefix=True, indent=2)
4642 % stringutil.pprint(res, bprefix=True, indent=2)
4643 )
4643 )
4644
4644
4645 elif action == b'batchbegin':
4645 elif action == b'batchbegin':
4646 if batchedcommands is not None:
4646 if batchedcommands is not None:
4647 raise error.Abort(_(b'nested batchbegin not allowed'))
4647 raise error.Abort(_(b'nested batchbegin not allowed'))
4648
4648
4649 batchedcommands = []
4649 batchedcommands = []
4650 elif action == b'batchsubmit':
4650 elif action == b'batchsubmit':
4651 # There is a batching API we could go through. But it would be
4651 # There is a batching API we could go through. But it would be
4652 # difficult to normalize requests into function calls. It is easier
4652 # difficult to normalize requests into function calls. It is easier
4653 # to bypass this layer and normalize to commands + args.
4653 # to bypass this layer and normalize to commands + args.
4654 ui.status(
4654 ui.status(
4655 _(b'sending batch with %d sub-commands\n')
4655 _(b'sending batch with %d sub-commands\n')
4656 % len(batchedcommands)
4656 % len(batchedcommands)
4657 )
4657 )
4658 assert peer is not None
4658 assert peer is not None
4659 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4659 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4660 ui.status(
4660 ui.status(
4661 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4661 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4662 )
4662 )
4663
4663
4664 batchedcommands = None
4664 batchedcommands = None
4665
4665
4666 elif action.startswith(b'httprequest '):
4666 elif action.startswith(b'httprequest '):
4667 if not opener:
4667 if not opener:
4668 raise error.Abort(
4668 raise error.Abort(
4669 _(b'cannot use httprequest without an HTTP peer')
4669 _(b'cannot use httprequest without an HTTP peer')
4670 )
4670 )
4671
4671
4672 request = action.split(b' ', 2)
4672 request = action.split(b' ', 2)
4673 if len(request) != 3:
4673 if len(request) != 3:
4674 raise error.Abort(
4674 raise error.Abort(
4675 _(
4675 _(
4676 b'invalid httprequest: expected format is '
4676 b'invalid httprequest: expected format is '
4677 b'"httprequest <method> <path>'
4677 b'"httprequest <method> <path>'
4678 )
4678 )
4679 )
4679 )
4680
4680
4681 method, httppath = request[1:]
4681 method, httppath = request[1:]
4682 headers = {}
4682 headers = {}
4683 body = None
4683 body = None
4684 frames = []
4684 frames = []
4685 for line in lines:
4685 for line in lines:
4686 line = line.lstrip()
4686 line = line.lstrip()
4687 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4687 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4688 if m:
4688 if m:
4689 # Headers need to use native strings.
4689 # Headers need to use native strings.
4690 key = pycompat.strurl(m.group(1))
4690 key = pycompat.strurl(m.group(1))
4691 value = pycompat.strurl(m.group(2))
4691 value = pycompat.strurl(m.group(2))
4692 headers[key] = value
4692 headers[key] = value
4693 continue
4693 continue
4694
4694
4695 if line.startswith(b'BODYFILE '):
4695 if line.startswith(b'BODYFILE '):
4696 with open(line.split(b' ', 1), b'rb') as fh:
4696 with open(line.split(b' ', 1), b'rb') as fh:
4697 body = fh.read()
4697 body = fh.read()
4698 elif line.startswith(b'frame '):
4698 elif line.startswith(b'frame '):
4699 frame = wireprotoframing.makeframefromhumanstring(
4699 frame = wireprotoframing.makeframefromhumanstring(
4700 line[len(b'frame ') :]
4700 line[len(b'frame ') :]
4701 )
4701 )
4702
4702
4703 frames.append(frame)
4703 frames.append(frame)
4704 else:
4704 else:
4705 raise error.Abort(
4705 raise error.Abort(
4706 _(b'unknown argument to httprequest: %s') % line
4706 _(b'unknown argument to httprequest: %s') % line
4707 )
4707 )
4708
4708
4709 url = path + httppath
4709 url = path + httppath
4710
4710
4711 if frames:
4711 if frames:
4712 body = b''.join(bytes(f) for f in frames)
4712 body = b''.join(bytes(f) for f in frames)
4713
4713
4714 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4714 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4715
4715
4716 # urllib.Request insists on using has_data() as a proxy for
4716 # urllib.Request insists on using has_data() as a proxy for
4717 # determining the request method. Override that to use our
4717 # determining the request method. Override that to use our
4718 # explicitly requested method.
4718 # explicitly requested method.
4719 req.get_method = lambda: pycompat.sysstr(method)
4719 req.get_method = lambda: pycompat.sysstr(method)
4720
4720
4721 try:
4721 try:
4722 res = opener.open(req)
4722 res = opener.open(req)
4723 body = res.read()
4723 body = res.read()
4724 except util.urlerr.urlerror as e:
4724 except util.urlerr.urlerror as e:
4725 # read() method must be called, but only exists in Python 2
4725 # read() method must be called, but only exists in Python 2
4726 getattr(e, 'read', lambda: None)()
4726 getattr(e, 'read', lambda: None)()
4727 continue
4727 continue
4728
4728
4729 ct = res.headers.get('Content-Type')
4729 ct = res.headers.get('Content-Type')
4730 if ct == 'application/mercurial-cbor':
4730 if ct == 'application/mercurial-cbor':
4731 ui.write(
4731 ui.write(
4732 _(b'cbor> %s\n')
4732 _(b'cbor> %s\n')
4733 % stringutil.pprint(
4733 % stringutil.pprint(
4734 cborutil.decodeall(body), bprefix=True, indent=2
4734 cborutil.decodeall(body), bprefix=True, indent=2
4735 )
4735 )
4736 )
4736 )
4737
4737
4738 elif action == b'close':
4738 elif action == b'close':
4739 assert peer is not None
4739 assert peer is not None
4740 peer.close()
4740 peer.close()
4741 elif action == b'readavailable':
4741 elif action == b'readavailable':
4742 if not stdout or not stderr:
4742 if not stdout or not stderr:
4743 raise error.Abort(
4743 raise error.Abort(
4744 _(b'readavailable not available on this peer')
4744 _(b'readavailable not available on this peer')
4745 )
4745 )
4746
4746
4747 stdin.close()
4747 stdin.close()
4748 stdout.read()
4748 stdout.read()
4749 stderr.read()
4749 stderr.read()
4750
4750
4751 elif action == b'readline':
4751 elif action == b'readline':
4752 if not stdout:
4752 if not stdout:
4753 raise error.Abort(_(b'readline not available on this peer'))
4753 raise error.Abort(_(b'readline not available on this peer'))
4754 stdout.readline()
4754 stdout.readline()
4755 elif action == b'ereadline':
4755 elif action == b'ereadline':
4756 if not stderr:
4756 if not stderr:
4757 raise error.Abort(_(b'ereadline not available on this peer'))
4757 raise error.Abort(_(b'ereadline not available on this peer'))
4758 stderr.readline()
4758 stderr.readline()
4759 elif action.startswith(b'read '):
4759 elif action.startswith(b'read '):
4760 count = int(action.split(b' ', 1)[1])
4760 count = int(action.split(b' ', 1)[1])
4761 if not stdout:
4761 if not stdout:
4762 raise error.Abort(_(b'read not available on this peer'))
4762 raise error.Abort(_(b'read not available on this peer'))
4763 stdout.read(count)
4763 stdout.read(count)
4764 elif action.startswith(b'eread '):
4764 elif action.startswith(b'eread '):
4765 count = int(action.split(b' ', 1)[1])
4765 count = int(action.split(b' ', 1)[1])
4766 if not stderr:
4766 if not stderr:
4767 raise error.Abort(_(b'eread not available on this peer'))
4767 raise error.Abort(_(b'eread not available on this peer'))
4768 stderr.read(count)
4768 stderr.read(count)
4769 else:
4769 else:
4770 raise error.Abort(_(b'unknown action: %s') % action)
4770 raise error.Abort(_(b'unknown action: %s') % action)
4771
4771
4772 if batchedcommands is not None:
4772 if batchedcommands is not None:
4773 raise error.Abort(_(b'unclosed "batchbegin" request'))
4773 raise error.Abort(_(b'unclosed "batchbegin" request'))
4774
4774
4775 if peer:
4775 if peer:
4776 peer.close()
4776 peer.close()
4777
4777
4778 if proc:
4778 if proc:
4779 proc.kill()
4779 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now