debugcommands: don't shadow the error module...
Augie Fackler
r44034:72b454fa default
@@ -1,4266 +1,4266 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 changegroup,
41 changegroup,
42 cmdutil,
42 cmdutil,
43 color,
43 color,
44 context,
44 context,
45 copies,
45 copies,
46 dagparser,
46 dagparser,
47 encoding,
47 encoding,
48 error,
48 error,
49 exchange,
49 exchange,
50 extensions,
50 extensions,
51 filemerge,
51 filemerge,
52 filesetlang,
52 filesetlang,
53 formatter,
53 formatter,
54 hg,
54 hg,
55 httppeer,
55 httppeer,
56 localrepo,
56 localrepo,
57 lock as lockmod,
57 lock as lockmod,
58 logcmdutil,
58 logcmdutil,
59 merge as mergemod,
59 merge as mergemod,
60 obsolete,
60 obsolete,
61 obsutil,
61 obsutil,
62 pathutil,
62 pathutil,
63 phases,
63 phases,
64 policy,
64 policy,
65 pvec,
65 pvec,
66 pycompat,
66 pycompat,
67 registrar,
67 registrar,
68 repair,
68 repair,
69 revlog,
69 revlog,
70 revset,
70 revset,
71 revsetlang,
71 revsetlang,
72 scmutil,
72 scmutil,
73 setdiscovery,
73 setdiscovery,
74 simplemerge,
74 simplemerge,
75 sshpeer,
75 sshpeer,
76 sslutil,
76 sslutil,
77 streamclone,
77 streamclone,
78 templater,
78 templater,
79 treediscovery,
79 treediscovery,
80 upgrade,
80 upgrade,
81 url as urlmod,
81 url as urlmod,
82 util,
82 util,
83 vfs as vfsmod,
83 vfs as vfsmod,
84 wireprotoframing,
84 wireprotoframing,
85 wireprotoserver,
85 wireprotoserver,
86 wireprotov2peer,
86 wireprotov2peer,
87 )
87 )
88 from .utils import (
88 from .utils import (
89 cborutil,
89 cborutil,
90 compression,
90 compression,
91 dateutil,
91 dateutil,
92 procutil,
92 procutil,
93 stringutil,
93 stringutil,
94 )
94 )
95
95
96 from .revlogutils import deltas as deltautil
96 from .revlogutils import deltas as deltautil
97
97
98 release = lockmod.release
98 release = lockmod.release
99
99
100 command = registrar.command()
100 command = registrar.command()
101
101
102
102
103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
104 def debugancestor(ui, repo, *args):
104 def debugancestor(ui, repo, *args):
105 """find the ancestor revision of two revisions in a given index"""
105 """find the ancestor revision of two revisions in a given index"""
106 if len(args) == 3:
106 if len(args) == 3:
107 index, rev1, rev2 = args
107 index, rev1, rev2 = args
108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
109 lookup = r.lookup
109 lookup = r.lookup
110 elif len(args) == 2:
110 elif len(args) == 2:
111 if not repo:
111 if not repo:
112 raise error.Abort(
112 raise error.Abort(
113 _(b'there is no Mercurial repository here (.hg not found)')
113 _(b'there is no Mercurial repository here (.hg not found)')
114 )
114 )
115 rev1, rev2 = args
115 rev1, rev2 = args
116 r = repo.changelog
116 r = repo.changelog
117 lookup = repo.lookup
117 lookup = repo.lookup
118 else:
118 else:
119 raise error.Abort(_(b'either two or three arguments required'))
119 raise error.Abort(_(b'either two or three arguments required'))
120 a = r.ancestor(lookup(rev1), lookup(rev2))
120 a = r.ancestor(lookup(rev1), lookup(rev2))
121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
122
122
123
123
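As a sketch of what the two-argument form of debugancestor above does, the equivalent changelog calls can be made directly from the Python API. The repository path and the revision strings '1' and '2' below are illustrative assumptions, not part of the original change:

from mercurial import hg, ui as uimod
from mercurial.node import hex

# assumption: a Mercurial repository exists in the current directory
repo = hg.repository(uimod.ui.load(), b'.')
cl = repo.changelog
# same ancestor/lookup calls debugancestor performs for two revisions
a = cl.ancestor(repo.lookup(b'1'), repo.lookup(b'2'))
print('%d:%s' % (cl.rev(a), hex(a).decode('ascii')))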
124 @command(b'debugapplystreamclonebundle', [], b'FILE')
124 @command(b'debugapplystreamclonebundle', [], b'FILE')
125 def debugapplystreamclonebundle(ui, repo, fname):
125 def debugapplystreamclonebundle(ui, repo, fname):
126 """apply a stream clone bundle file"""
126 """apply a stream clone bundle file"""
127 f = hg.openpath(ui, fname)
127 f = hg.openpath(ui, fname)
128 gen = exchange.readbundle(ui, f, fname)
128 gen = exchange.readbundle(ui, f, fname)
129 gen.apply(repo)
129 gen.apply(repo)
130
130
131
131
132 @command(
132 @command(
133 b'debugbuilddag',
133 b'debugbuilddag',
134 [
134 [
135 (
135 (
136 b'm',
136 b'm',
137 b'mergeable-file',
137 b'mergeable-file',
138 None,
138 None,
139 _(b'add single file mergeable changes'),
139 _(b'add single file mergeable changes'),
140 ),
140 ),
141 (
141 (
142 b'o',
142 b'o',
143 b'overwritten-file',
143 b'overwritten-file',
144 None,
144 None,
145 _(b'add single file all revs overwrite'),
145 _(b'add single file all revs overwrite'),
146 ),
146 ),
147 (b'n', b'new-file', None, _(b'add new file at each rev')),
147 (b'n', b'new-file', None, _(b'add new file at each rev')),
148 ],
148 ],
149 _(b'[OPTION]... [TEXT]'),
149 _(b'[OPTION]... [TEXT]'),
150 )
150 )
151 def debugbuilddag(
151 def debugbuilddag(
152 ui,
152 ui,
153 repo,
153 repo,
154 text=None,
154 text=None,
155 mergeable_file=False,
155 mergeable_file=False,
156 overwritten_file=False,
156 overwritten_file=False,
157 new_file=False,
157 new_file=False,
158 ):
158 ):
159 """builds a repo with a given DAG from scratch in the current empty repo
159 """builds a repo with a given DAG from scratch in the current empty repo
160
160
161 The description of the DAG is read from stdin if not given on the
161 The description of the DAG is read from stdin if not given on the
162 command line.
162 command line.
163
163
164 Elements:
164 Elements:
165
165
166 - "+n" is a linear run of n nodes based on the current default parent
166 - "+n" is a linear run of n nodes based on the current default parent
167 - "." is a single node based on the current default parent
167 - "." is a single node based on the current default parent
168 - "$" resets the default parent to null (implied at the start);
168 - "$" resets the default parent to null (implied at the start);
169 otherwise the default parent is always the last node created
169 otherwise the default parent is always the last node created
170 - "<p" sets the default parent to the backref p
170 - "<p" sets the default parent to the backref p
171 - "*p" is a fork at parent p, which is a backref
171 - "*p" is a fork at parent p, which is a backref
172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
173 - "/p2" is a merge of the preceding node and p2
173 - "/p2" is a merge of the preceding node and p2
174 - ":tag" defines a local tag for the preceding node
174 - ":tag" defines a local tag for the preceding node
175 - "@branch" sets the named branch for subsequent nodes
175 - "@branch" sets the named branch for subsequent nodes
176 - "#...\\n" is a comment up to the end of the line
176 - "#...\\n" is a comment up to the end of the line
177
177
178 Whitespace between the above elements is ignored.
178 Whitespace between the above elements is ignored.
179
179
180 A backref is either
180 A backref is either
181
181
182 - a number n, which references the node curr-n, where curr is the current
182 - a number n, which references the node curr-n, where curr is the current
183 node, or
183 node, or
184 - the name of a local tag you placed earlier using ":tag", or
184 - the name of a local tag you placed earlier using ":tag", or
185 - empty to denote the default parent.
185 - empty to denote the default parent.
186
186
187 All string-valued elements are either strictly alphanumeric, or must
187 All string-valued elements are either strictly alphanumeric, or must
188 be enclosed in double quotes ("..."), with "\\" as escape character.
188 be enclosed in double quotes ("..."), with "\\" as escape character.
189 """
189 """
190
190
191 if text is None:
191 if text is None:
192 ui.status(_(b"reading DAG from stdin\n"))
192 ui.status(_(b"reading DAG from stdin\n"))
193 text = ui.fin.read()
193 text = ui.fin.read()
194
194
195 cl = repo.changelog
195 cl = repo.changelog
196 if len(cl) > 0:
196 if len(cl) > 0:
197 raise error.Abort(_(b'repository is not empty'))
197 raise error.Abort(_(b'repository is not empty'))
198
198
199 # determine number of revs in DAG
199 # determine number of revs in DAG
200 total = 0
200 total = 0
201 for type, data in dagparser.parsedag(text):
201 for type, data in dagparser.parsedag(text):
202 if type == b'n':
202 if type == b'n':
203 total += 1
203 total += 1
204
204
205 if mergeable_file:
205 if mergeable_file:
206 linesperrev = 2
206 linesperrev = 2
207 # make a file with k lines per rev
207 # make a file with k lines per rev
208 initialmergedlines = [
208 initialmergedlines = [
209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
210 ]
210 ]
211 initialmergedlines.append(b"")
211 initialmergedlines.append(b"")
212
212
213 tags = []
213 tags = []
214 progress = ui.makeprogress(
214 progress = ui.makeprogress(
215 _(b'building'), unit=_(b'revisions'), total=total
215 _(b'building'), unit=_(b'revisions'), total=total
216 )
216 )
217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
218 at = -1
218 at = -1
219 atbranch = b'default'
219 atbranch = b'default'
220 nodeids = []
220 nodeids = []
221 id = 0
221 id = 0
222 progress.update(id)
222 progress.update(id)
223 for type, data in dagparser.parsedag(text):
223 for type, data in dagparser.parsedag(text):
224 if type == b'n':
224 if type == b'n':
225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
226 id, ps = data
226 id, ps = data
227
227
228 files = []
228 files = []
229 filecontent = {}
229 filecontent = {}
230
230
231 p2 = None
231 p2 = None
232 if mergeable_file:
232 if mergeable_file:
233 fn = b"mf"
233 fn = b"mf"
234 p1 = repo[ps[0]]
234 p1 = repo[ps[0]]
235 if len(ps) > 1:
235 if len(ps) > 1:
236 p2 = repo[ps[1]]
236 p2 = repo[ps[1]]
237 pa = p1.ancestor(p2)
237 pa = p1.ancestor(p2)
238 base, local, other = [
238 base, local, other = [
239 x[fn].data() for x in (pa, p1, p2)
239 x[fn].data() for x in (pa, p1, p2)
240 ]
240 ]
241 m3 = simplemerge.Merge3Text(base, local, other)
241 m3 = simplemerge.Merge3Text(base, local, other)
242 ml = [l.strip() for l in m3.merge_lines()]
242 ml = [l.strip() for l in m3.merge_lines()]
243 ml.append(b"")
243 ml.append(b"")
244 elif at > 0:
244 elif at > 0:
245 ml = p1[fn].data().split(b"\n")
245 ml = p1[fn].data().split(b"\n")
246 else:
246 else:
247 ml = initialmergedlines
247 ml = initialmergedlines
248 ml[id * linesperrev] += b" r%i" % id
248 ml[id * linesperrev] += b" r%i" % id
249 mergedtext = b"\n".join(ml)
249 mergedtext = b"\n".join(ml)
250 files.append(fn)
250 files.append(fn)
251 filecontent[fn] = mergedtext
251 filecontent[fn] = mergedtext
252
252
253 if overwritten_file:
253 if overwritten_file:
254 fn = b"of"
254 fn = b"of"
255 files.append(fn)
255 files.append(fn)
256 filecontent[fn] = b"r%i\n" % id
256 filecontent[fn] = b"r%i\n" % id
257
257
258 if new_file:
258 if new_file:
259 fn = b"nf%i" % id
259 fn = b"nf%i" % id
260 files.append(fn)
260 files.append(fn)
261 filecontent[fn] = b"r%i\n" % id
261 filecontent[fn] = b"r%i\n" % id
262 if len(ps) > 1:
262 if len(ps) > 1:
263 if not p2:
263 if not p2:
264 p2 = repo[ps[1]]
264 p2 = repo[ps[1]]
265 for fn in p2:
265 for fn in p2:
266 if fn.startswith(b"nf"):
266 if fn.startswith(b"nf"):
267 files.append(fn)
267 files.append(fn)
268 filecontent[fn] = p2[fn].data()
268 filecontent[fn] = p2[fn].data()
269
269
270 def fctxfn(repo, cx, path):
270 def fctxfn(repo, cx, path):
271 if path in filecontent:
271 if path in filecontent:
272 return context.memfilectx(
272 return context.memfilectx(
273 repo, cx, path, filecontent[path]
273 repo, cx, path, filecontent[path]
274 )
274 )
275 return None
275 return None
276
276
277 if len(ps) == 0 or ps[0] < 0:
277 if len(ps) == 0 or ps[0] < 0:
278 pars = [None, None]
278 pars = [None, None]
279 elif len(ps) == 1:
279 elif len(ps) == 1:
280 pars = [nodeids[ps[0]], None]
280 pars = [nodeids[ps[0]], None]
281 else:
281 else:
282 pars = [nodeids[p] for p in ps]
282 pars = [nodeids[p] for p in ps]
283 cx = context.memctx(
283 cx = context.memctx(
284 repo,
284 repo,
285 pars,
285 pars,
286 b"r%i" % id,
286 b"r%i" % id,
287 files,
287 files,
288 fctxfn,
288 fctxfn,
289 date=(id, 0),
289 date=(id, 0),
290 user=b"debugbuilddag",
290 user=b"debugbuilddag",
291 extra={b'branch': atbranch},
291 extra={b'branch': atbranch},
292 )
292 )
293 nodeid = repo.commitctx(cx)
293 nodeid = repo.commitctx(cx)
294 nodeids.append(nodeid)
294 nodeids.append(nodeid)
295 at = id
295 at = id
296 elif type == b'l':
296 elif type == b'l':
297 id, name = data
297 id, name = data
298 ui.note((b'tag %s\n' % name))
298 ui.note((b'tag %s\n' % name))
299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
300 elif type == b'a':
300 elif type == b'a':
301 ui.note((b'branch %s\n' % data))
301 ui.note((b'branch %s\n' % data))
302 atbranch = data
302 atbranch = data
303 progress.update(id)
303 progress.update(id)
304
304
305 if tags:
305 if tags:
306 repo.vfs.write(b"localtags", b"".join(tags))
306 repo.vfs.write(b"localtags", b"".join(tags))
307
307
308
308
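For readers unfamiliar with the DAG text language described in the debugbuilddag docstring above, here is a small sketch of how the command's parsing loop sees an input string. The sample text is an illustrative assumption; the event types (b'n', b'l', b'a') and their payloads match the branches handled in the loop above:

from mercurial import dagparser

# two linear nodes, tag the second, fork from that tag, then one more node
text = b'+2:base *base +1'
for type, data in dagparser.parsedag(text):
    if type == b'n':      # new node: (id, [parent backrefs])
        print('node', data)
    elif type == b'l':    # local tag for the preceding node: (id, name)
        print('tag', data)
    elif type == b'a':    # branch annotation: branch name
        print('branch', data)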
309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
310 indent_string = b' ' * indent
310 indent_string = b' ' * indent
311 if all:
311 if all:
312 ui.writenoi18n(
312 ui.writenoi18n(
313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
314 % indent_string
314 % indent_string
315 )
315 )
316
316
317 def showchunks(named):
317 def showchunks(named):
318 ui.write(b"\n%s%s\n" % (indent_string, named))
318 ui.write(b"\n%s%s\n" % (indent_string, named))
319 for deltadata in gen.deltaiter():
319 for deltadata in gen.deltaiter():
320 node, p1, p2, cs, deltabase, delta, flags = deltadata
320 node, p1, p2, cs, deltabase, delta, flags = deltadata
321 ui.write(
321 ui.write(
322 b"%s%s %s %s %s %s %d\n"
322 b"%s%s %s %s %s %s %d\n"
323 % (
323 % (
324 indent_string,
324 indent_string,
325 hex(node),
325 hex(node),
326 hex(p1),
326 hex(p1),
327 hex(p2),
327 hex(p2),
328 hex(cs),
328 hex(cs),
329 hex(deltabase),
329 hex(deltabase),
330 len(delta),
330 len(delta),
331 )
331 )
332 )
332 )
333
333
334 chunkdata = gen.changelogheader()
334 chunkdata = gen.changelogheader()
335 showchunks(b"changelog")
335 showchunks(b"changelog")
336 chunkdata = gen.manifestheader()
336 chunkdata = gen.manifestheader()
337 showchunks(b"manifest")
337 showchunks(b"manifest")
338 for chunkdata in iter(gen.filelogheader, {}):
338 for chunkdata in iter(gen.filelogheader, {}):
339 fname = chunkdata[b'filename']
339 fname = chunkdata[b'filename']
340 showchunks(fname)
340 showchunks(fname)
341 else:
341 else:
342 if isinstance(gen, bundle2.unbundle20):
342 if isinstance(gen, bundle2.unbundle20):
343 raise error.Abort(_(b'use debugbundle2 for this file'))
343 raise error.Abort(_(b'use debugbundle2 for this file'))
344 chunkdata = gen.changelogheader()
344 chunkdata = gen.changelogheader()
345 for deltadata in gen.deltaiter():
345 for deltadata in gen.deltaiter():
346 node, p1, p2, cs, deltabase, delta, flags = deltadata
346 node, p1, p2, cs, deltabase, delta, flags = deltadata
347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
348
348
349
349
350 def _debugobsmarkers(ui, part, indent=0, **opts):
350 def _debugobsmarkers(ui, part, indent=0, **opts):
351 """display version and markers contained in 'data'"""
351 """display version and markers contained in 'data'"""
352 opts = pycompat.byteskwargs(opts)
352 opts = pycompat.byteskwargs(opts)
353 data = part.read()
353 data = part.read()
354 indent_string = b' ' * indent
354 indent_string = b' ' * indent
355 try:
355 try:
356 version, markers = obsolete._readmarkers(data)
356 version, markers = obsolete._readmarkers(data)
357 except error.UnknownVersion as exc:
357 except error.UnknownVersion as exc:
358 msg = b"%sunsupported version: %s (%d bytes)\n"
358 msg = b"%sunsupported version: %s (%d bytes)\n"
359 msg %= indent_string, exc.version, len(data)
359 msg %= indent_string, exc.version, len(data)
360 ui.write(msg)
360 ui.write(msg)
361 else:
361 else:
362 msg = b"%sversion: %d (%d bytes)\n"
362 msg = b"%sversion: %d (%d bytes)\n"
363 msg %= indent_string, version, len(data)
363 msg %= indent_string, version, len(data)
364 ui.write(msg)
364 ui.write(msg)
365 fm = ui.formatter(b'debugobsolete', opts)
365 fm = ui.formatter(b'debugobsolete', opts)
366 for rawmarker in sorted(markers):
366 for rawmarker in sorted(markers):
367 m = obsutil.marker(None, rawmarker)
367 m = obsutil.marker(None, rawmarker)
368 fm.startitem()
368 fm.startitem()
369 fm.plain(indent_string)
369 fm.plain(indent_string)
370 cmdutil.showmarker(fm, m)
370 cmdutil.showmarker(fm, m)
371 fm.end()
371 fm.end()
372
372
373
373
374 def _debugphaseheads(ui, data, indent=0):
374 def _debugphaseheads(ui, data, indent=0):
375 """display version and markers contained in 'data'"""
375 """display version and markers contained in 'data'"""
376 indent_string = b' ' * indent
376 indent_string = b' ' * indent
377 headsbyphase = phases.binarydecode(data)
377 headsbyphase = phases.binarydecode(data)
378 for phase in phases.allphases:
378 for phase in phases.allphases:
379 for head in headsbyphase[phase]:
379 for head in headsbyphase[phase]:
380 ui.write(indent_string)
380 ui.write(indent_string)
381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382
382
383
383
384 def _quasirepr(thing):
384 def _quasirepr(thing):
385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
386 return b'{%s}' % (
386 return b'{%s}' % (
387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
388 )
388 )
389 return pycompat.bytestr(repr(thing))
389 return pycompat.bytestr(repr(thing))
390
390
391
391
392 def _debugbundle2(ui, gen, all=None, **opts):
392 def _debugbundle2(ui, gen, all=None, **opts):
393 """lists the contents of a bundle2"""
393 """lists the contents of a bundle2"""
394 if not isinstance(gen, bundle2.unbundle20):
394 if not isinstance(gen, bundle2.unbundle20):
395 raise error.Abort(_(b'not a bundle2 file'))
395 raise error.Abort(_(b'not a bundle2 file'))
396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
397 parttypes = opts.get('part_type', [])
397 parttypes = opts.get('part_type', [])
398 for part in gen.iterparts():
398 for part in gen.iterparts():
399 if parttypes and part.type not in parttypes:
399 if parttypes and part.type not in parttypes:
400 continue
400 continue
401 msg = b'%s -- %s (mandatory: %r)\n'
401 msg = b'%s -- %s (mandatory: %r)\n'
402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
403 if part.type == b'changegroup':
403 if part.type == b'changegroup':
404 version = part.params.get(b'version', b'01')
404 version = part.params.get(b'version', b'01')
405 cg = changegroup.getunbundler(version, part, b'UN')
405 cg = changegroup.getunbundler(version, part, b'UN')
406 if not ui.quiet:
406 if not ui.quiet:
407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
408 if part.type == b'obsmarkers':
408 if part.type == b'obsmarkers':
409 if not ui.quiet:
409 if not ui.quiet:
410 _debugobsmarkers(ui, part, indent=4, **opts)
410 _debugobsmarkers(ui, part, indent=4, **opts)
411 if part.type == b'phase-heads':
411 if part.type == b'phase-heads':
412 if not ui.quiet:
412 if not ui.quiet:
413 _debugphaseheads(ui, part, indent=4)
413 _debugphaseheads(ui, part, indent=4)
414
414
415
415
416 @command(
416 @command(
417 b'debugbundle',
417 b'debugbundle',
418 [
418 [
419 (b'a', b'all', None, _(b'show all details')),
419 (b'a', b'all', None, _(b'show all details')),
420 (b'', b'part-type', [], _(b'show only the named part type')),
420 (b'', b'part-type', [], _(b'show only the named part type')),
421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
422 ],
422 ],
423 _(b'FILE'),
423 _(b'FILE'),
424 norepo=True,
424 norepo=True,
425 )
425 )
426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
427 """lists the contents of a bundle"""
427 """lists the contents of a bundle"""
428 with hg.openpath(ui, bundlepath) as f:
428 with hg.openpath(ui, bundlepath) as f:
429 if spec:
429 if spec:
430 spec = exchange.getbundlespec(ui, f)
430 spec = exchange.getbundlespec(ui, f)
431 ui.write(b'%s\n' % spec)
431 ui.write(b'%s\n' % spec)
432 return
432 return
433
433
434 gen = exchange.readbundle(ui, f, bundlepath)
434 gen = exchange.readbundle(ui, f, bundlepath)
435 if isinstance(gen, bundle2.unbundle20):
435 if isinstance(gen, bundle2.unbundle20):
436 return _debugbundle2(ui, gen, all=all, **opts)
436 return _debugbundle2(ui, gen, all=all, **opts)
437 _debugchangegroup(ui, gen, all=all, **opts)
437 _debugchangegroup(ui, gen, all=all, **opts)
438
438
439
439
440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
441 def debugcapabilities(ui, path, **opts):
441 def debugcapabilities(ui, path, **opts):
442 """lists the capabilities of a remote peer"""
442 """lists the capabilities of a remote peer"""
443 opts = pycompat.byteskwargs(opts)
443 opts = pycompat.byteskwargs(opts)
444 peer = hg.peer(ui, opts, path)
444 peer = hg.peer(ui, opts, path)
445 caps = peer.capabilities()
445 caps = peer.capabilities()
446 ui.writenoi18n(b'Main capabilities:\n')
446 ui.writenoi18n(b'Main capabilities:\n')
447 for c in sorted(caps):
447 for c in sorted(caps):
448 ui.write(b' %s\n' % c)
448 ui.write(b' %s\n' % c)
449 b2caps = bundle2.bundle2caps(peer)
449 b2caps = bundle2.bundle2caps(peer)
450 if b2caps:
450 if b2caps:
451 ui.writenoi18n(b'Bundle2 capabilities:\n')
451 ui.writenoi18n(b'Bundle2 capabilities:\n')
452 for key, values in sorted(pycompat.iteritems(b2caps)):
452 for key, values in sorted(pycompat.iteritems(b2caps)):
453 ui.write(b' %s\n' % key)
453 ui.write(b' %s\n' % key)
454 for v in values:
454 for v in values:
455 ui.write(b' %s\n' % v)
455 ui.write(b' %s\n' % v)
456
456
457
457
458 @command(b'debugcheckstate', [], b'')
458 @command(b'debugcheckstate', [], b'')
459 def debugcheckstate(ui, repo):
459 def debugcheckstate(ui, repo):
460 """validate the correctness of the current dirstate"""
460 """validate the correctness of the current dirstate"""
461 parent1, parent2 = repo.dirstate.parents()
461 parent1, parent2 = repo.dirstate.parents()
462 m1 = repo[parent1].manifest()
462 m1 = repo[parent1].manifest()
463 m2 = repo[parent2].manifest()
463 m2 = repo[parent2].manifest()
464 errors = 0
464 errors = 0
465 for f in repo.dirstate:
465 for f in repo.dirstate:
466 state = repo.dirstate[f]
466 state = repo.dirstate[f]
467 if state in b"nr" and f not in m1:
467 if state in b"nr" and f not in m1:
468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
469 errors += 1
469 errors += 1
470 if state in b"a" and f in m1:
470 if state in b"a" and f in m1:
471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
472 errors += 1
472 errors += 1
473 if state in b"m" and f not in m1 and f not in m2:
473 if state in b"m" and f not in m1 and f not in m2:
474 ui.warn(
474 ui.warn(
475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
476 )
476 )
477 errors += 1
477 errors += 1
478 for f in m1:
478 for f in m1:
479 state = repo.dirstate[f]
479 state = repo.dirstate[f]
480 if state not in b"nrm":
480 if state not in b"nrm":
481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
482 errors += 1
482 errors += 1
483 if errors:
483 if errors:
484 error = _(b".hg/dirstate inconsistent with current parent's manifest")
484 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
485 raise error.Abort(error)
485 raise error.Abort(errstr)
486
486
487
487
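The rename of the local variable to errstr in debugcheckstate above is the whole point of this revision: binding the message to a name called error shadowed the error module imported at the top of the file, so the very next line's error.Abort lookup would have been performed on a bytes string. A minimal illustration of the pattern follows; the message text is taken from the function above, but the standalone function is a hypothetical reduction, not Mercurial code:

from mercurial import error

def check(problems):
    if problems:
        # Before this change the message was bound to a local named 'error',
        # which hid the imported module; 'error.Abort' then became an attribute
        # lookup on a bytes object and raised AttributeError instead of
        # aborting cleanly.
        errstr = b".hg/dirstate inconsistent with current parent's manifest"
        raise error.Abort(errstr)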
488 @command(
488 @command(
489 b'debugcolor',
489 b'debugcolor',
490 [(b'', b'style', None, _(b'show all configured styles'))],
490 [(b'', b'style', None, _(b'show all configured styles'))],
491 b'hg debugcolor',
491 b'hg debugcolor',
492 )
492 )
493 def debugcolor(ui, repo, **opts):
493 def debugcolor(ui, repo, **opts):
494 """show available color, effects or style"""
494 """show available color, effects or style"""
495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
496 if opts.get('style'):
496 if opts.get('style'):
497 return _debugdisplaystyle(ui)
497 return _debugdisplaystyle(ui)
498 else:
498 else:
499 return _debugdisplaycolor(ui)
499 return _debugdisplaycolor(ui)
500
500
501
501
502 def _debugdisplaycolor(ui):
502 def _debugdisplaycolor(ui):
503 ui = ui.copy()
503 ui = ui.copy()
504 ui._styles.clear()
504 ui._styles.clear()
505 for effect in color._activeeffects(ui).keys():
505 for effect in color._activeeffects(ui).keys():
506 ui._styles[effect] = effect
506 ui._styles[effect] = effect
507 if ui._terminfoparams:
507 if ui._terminfoparams:
508 for k, v in ui.configitems(b'color'):
508 for k, v in ui.configitems(b'color'):
509 if k.startswith(b'color.'):
509 if k.startswith(b'color.'):
510 ui._styles[k] = k[6:]
510 ui._styles[k] = k[6:]
511 elif k.startswith(b'terminfo.'):
511 elif k.startswith(b'terminfo.'):
512 ui._styles[k] = k[9:]
512 ui._styles[k] = k[9:]
513 ui.write(_(b'available colors:\n'))
513 ui.write(_(b'available colors:\n'))
514 # sort label with a '_' after the other to group '_background' entry.
514 # sort label with a '_' after the other to group '_background' entry.
515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
516 for colorname, label in items:
516 for colorname, label in items:
517 ui.write(b'%s\n' % colorname, label=label)
517 ui.write(b'%s\n' % colorname, label=label)
518
518
519
519
520 def _debugdisplaystyle(ui):
520 def _debugdisplaystyle(ui):
521 ui.write(_(b'available style:\n'))
521 ui.write(_(b'available style:\n'))
522 if not ui._styles:
522 if not ui._styles:
523 return
523 return
524 width = max(len(s) for s in ui._styles)
524 width = max(len(s) for s in ui._styles)
525 for label, effects in sorted(ui._styles.items()):
525 for label, effects in sorted(ui._styles.items()):
526 ui.write(b'%s' % label, label=label)
526 ui.write(b'%s' % label, label=label)
527 if effects:
527 if effects:
528 # 50
528 # 50
529 ui.write(b': ')
529 ui.write(b': ')
530 ui.write(b' ' * (max(0, width - len(label))))
530 ui.write(b' ' * (max(0, width - len(label))))
531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
532 ui.write(b'\n')
532 ui.write(b'\n')
533
533
534
534
535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
536 def debugcreatestreamclonebundle(ui, repo, fname):
536 def debugcreatestreamclonebundle(ui, repo, fname):
537 """create a stream clone bundle file
537 """create a stream clone bundle file
538
538
539 Stream bundles are special bundles that are essentially archives of
539 Stream bundles are special bundles that are essentially archives of
540 revlog files. They are commonly used for cloning very quickly.
540 revlog files. They are commonly used for cloning very quickly.
541 """
541 """
542 # TODO we may want to turn this into an abort when this functionality
542 # TODO we may want to turn this into an abort when this functionality
543 # is moved into `hg bundle`.
543 # is moved into `hg bundle`.
544 if phases.hassecret(repo):
544 if phases.hassecret(repo):
545 ui.warn(
545 ui.warn(
546 _(
546 _(
547 b'(warning: stream clone bundle will contain secret '
547 b'(warning: stream clone bundle will contain secret '
548 b'revisions)\n'
548 b'revisions)\n'
549 )
549 )
550 )
550 )
551
551
552 requirements, gen = streamclone.generatebundlev1(repo)
552 requirements, gen = streamclone.generatebundlev1(repo)
553 changegroup.writechunks(ui, gen, fname)
553 changegroup.writechunks(ui, gen, fname)
554
554
555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
556
556
557
557
558 @command(
558 @command(
559 b'debugdag',
559 b'debugdag',
560 [
560 [
561 (b't', b'tags', None, _(b'use tags as labels')),
561 (b't', b'tags', None, _(b'use tags as labels')),
562 (b'b', b'branches', None, _(b'annotate with branch names')),
562 (b'b', b'branches', None, _(b'annotate with branch names')),
563 (b'', b'dots', None, _(b'use dots for runs')),
563 (b'', b'dots', None, _(b'use dots for runs')),
564 (b's', b'spaces', None, _(b'separate elements by spaces')),
564 (b's', b'spaces', None, _(b'separate elements by spaces')),
565 ],
565 ],
566 _(b'[OPTION]... [FILE [REV]...]'),
566 _(b'[OPTION]... [FILE [REV]...]'),
567 optionalrepo=True,
567 optionalrepo=True,
568 )
568 )
569 def debugdag(ui, repo, file_=None, *revs, **opts):
569 def debugdag(ui, repo, file_=None, *revs, **opts):
570 """format the changelog or an index DAG as a concise textual description
570 """format the changelog or an index DAG as a concise textual description
571
571
572 If you pass a revlog index, the revlog's DAG is emitted. If you list
572 If you pass a revlog index, the revlog's DAG is emitted. If you list
573 revision numbers, they get labeled in the output as rN.
573 revision numbers, they get labeled in the output as rN.
574
574
575 Otherwise, the changelog DAG of the current repo is emitted.
575 Otherwise, the changelog DAG of the current repo is emitted.
576 """
576 """
577 spaces = opts.get('spaces')
577 spaces = opts.get('spaces')
578 dots = opts.get('dots')
578 dots = opts.get('dots')
579 if file_:
579 if file_:
580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
581 revs = set((int(r) for r in revs))
581 revs = set((int(r) for r in revs))
582
582
583 def events():
583 def events():
584 for r in rlog:
584 for r in rlog:
585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
586 if r in revs:
586 if r in revs:
587 yield b'l', (r, b"r%i" % r)
587 yield b'l', (r, b"r%i" % r)
588
588
589 elif repo:
589 elif repo:
590 cl = repo.changelog
590 cl = repo.changelog
591 tags = opts.get('tags')
591 tags = opts.get('tags')
592 branches = opts.get('branches')
592 branches = opts.get('branches')
593 if tags:
593 if tags:
594 labels = {}
594 labels = {}
595 for l, n in repo.tags().items():
595 for l, n in repo.tags().items():
596 labels.setdefault(cl.rev(n), []).append(l)
596 labels.setdefault(cl.rev(n), []).append(l)
597
597
598 def events():
598 def events():
599 b = b"default"
599 b = b"default"
600 for r in cl:
600 for r in cl:
601 if branches:
601 if branches:
602 newb = cl.read(cl.node(r))[5][b'branch']
602 newb = cl.read(cl.node(r))[5][b'branch']
603 if newb != b:
603 if newb != b:
604 yield b'a', newb
604 yield b'a', newb
605 b = newb
605 b = newb
606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
607 if tags:
607 if tags:
608 ls = labels.get(r)
608 ls = labels.get(r)
609 if ls:
609 if ls:
610 for l in ls:
610 for l in ls:
611 yield b'l', (r, l)
611 yield b'l', (r, l)
612
612
613 else:
613 else:
614 raise error.Abort(_(b'need repo for changelog dag'))
614 raise error.Abort(_(b'need repo for changelog dag'))
615
615
616 for line in dagparser.dagtextlines(
616 for line in dagparser.dagtextlines(
617 events(),
617 events(),
618 addspaces=spaces,
618 addspaces=spaces,
619 wraplabels=True,
619 wraplabels=True,
620 wrapannotations=True,
620 wrapannotations=True,
621 wrapnonlinear=dots,
621 wrapnonlinear=dots,
622 usedots=dots,
622 usedots=dots,
623 maxlinewidth=70,
623 maxlinewidth=70,
624 ):
624 ):
625 ui.write(line)
625 ui.write(line)
626 ui.write(b"\n")
626 ui.write(b"\n")
627
627
628
628
629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
630 def debugdata(ui, repo, file_, rev=None, **opts):
630 def debugdata(ui, repo, file_, rev=None, **opts):
631 """dump the contents of a data file revision"""
631 """dump the contents of a data file revision"""
632 opts = pycompat.byteskwargs(opts)
632 opts = pycompat.byteskwargs(opts)
633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
634 if rev is not None:
634 if rev is not None:
635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
636 file_, rev = None, file_
636 file_, rev = None, file_
637 elif rev is None:
637 elif rev is None:
638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
640 try:
640 try:
641 ui.write(r.rawdata(r.lookup(rev)))
641 ui.write(r.rawdata(r.lookup(rev)))
642 except KeyError:
642 except KeyError:
643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
644
644
645
645
646 @command(
646 @command(
647 b'debugdate',
647 b'debugdate',
648 [(b'e', b'extended', None, _(b'try extended date formats'))],
648 [(b'e', b'extended', None, _(b'try extended date formats'))],
649 _(b'[-e] DATE [RANGE]'),
649 _(b'[-e] DATE [RANGE]'),
650 norepo=True,
650 norepo=True,
651 optionalrepo=True,
651 optionalrepo=True,
652 )
652 )
653 def debugdate(ui, date, range=None, **opts):
653 def debugdate(ui, date, range=None, **opts):
654 """parse and display a date"""
654 """parse and display a date"""
655 if opts["extended"]:
655 if opts["extended"]:
656 d = dateutil.parsedate(date, util.extendeddateformats)
656 d = dateutil.parsedate(date, util.extendeddateformats)
657 else:
657 else:
658 d = dateutil.parsedate(date)
658 d = dateutil.parsedate(date)
659 ui.writenoi18n(b"internal: %d %d\n" % d)
659 ui.writenoi18n(b"internal: %d %d\n" % d)
660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
661 if range:
661 if range:
662 m = dateutil.matchdate(range)
662 m = dateutil.matchdate(range)
663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
664
664
665
665
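A brief sketch of the dateutil calls that debugdate wraps, mirroring the "internal:" and "standard:" lines it prints; the input date string is an illustrative assumption:

from mercurial.utils import dateutil

# parsedate returns a (unixtime, tz offset in seconds) pair
d = dateutil.parsedate(b'2020-01-17 13:18 -0500')
print('internal: %d %d' % d)
print('standard: %s' % dateutil.datestr(d).decode('ascii'))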
666 @command(
666 @command(
667 b'debugdeltachain',
667 b'debugdeltachain',
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 _(b'-c|-m|FILE'),
669 _(b'-c|-m|FILE'),
670 optionalrepo=True,
670 optionalrepo=True,
671 )
671 )
672 def debugdeltachain(ui, repo, file_=None, **opts):
672 def debugdeltachain(ui, repo, file_=None, **opts):
673 """dump information about delta chains in a revlog
673 """dump information about delta chains in a revlog
674
674
675 Output can be templatized. Available template keywords are:
675 Output can be templatized. Available template keywords are:
676
676
677 :``rev``: revision number
677 :``rev``: revision number
678 :``chainid``: delta chain identifier (numbered by unique base)
678 :``chainid``: delta chain identifier (numbered by unique base)
679 :``chainlen``: delta chain length to this revision
679 :``chainlen``: delta chain length to this revision
680 :``prevrev``: previous revision in delta chain
680 :``prevrev``: previous revision in delta chain
681 :``deltatype``: role of delta / how it was computed
681 :``deltatype``: role of delta / how it was computed
682 :``compsize``: compressed size of revision
682 :``compsize``: compressed size of revision
683 :``uncompsize``: uncompressed size of revision
683 :``uncompsize``: uncompressed size of revision
684 :``chainsize``: total size of compressed revisions in chain
684 :``chainsize``: total size of compressed revisions in chain
685 :``chainratio``: total chain size divided by uncompressed revision size
685 :``chainratio``: total chain size divided by uncompressed revision size
686 (new delta chains typically start at ratio 2.00)
686 (new delta chains typically start at ratio 2.00)
687 :``lindist``: linear distance from base revision in delta chain to end
687 :``lindist``: linear distance from base revision in delta chain to end
688 of this revision
688 of this revision
689 :``extradist``: total size of revisions not part of this delta chain from
689 :``extradist``: total size of revisions not part of this delta chain from
690 base of delta chain to end of this revision; a measurement
690 base of delta chain to end of this revision; a measurement
691 of how much extra data we need to read/seek across to read
691 of how much extra data we need to read/seek across to read
692 the delta chain for this revision
692 the delta chain for this revision
693 :``extraratio``: extradist divided by chainsize; another representation of
693 :``extraratio``: extradist divided by chainsize; another representation of
694 how much unrelated data is needed to load this delta chain
694 how much unrelated data is needed to load this delta chain
695
695
696 If the repository is configured to use the sparse read, additional keywords
696 If the repository is configured to use the sparse read, additional keywords
697 are available:
697 are available:
698
698
699 :``readsize``: total size of data read from the disk for a revision
699 :``readsize``: total size of data read from the disk for a revision
700 (sum of the sizes of all the blocks)
700 (sum of the sizes of all the blocks)
701 :``largestblock``: size of the largest block of data read from the disk
701 :``largestblock``: size of the largest block of data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
703 :``srchunks``: in how many data hunks the whole revision would be read
703 :``srchunks``: in how many data hunks the whole revision would be read
704
704
705 The sparse read can be enabled with experimental.sparse-read = True
705 The sparse read can be enabled with experimental.sparse-read = True
706 """
706 """
707 opts = pycompat.byteskwargs(opts)
707 opts = pycompat.byteskwargs(opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 index = r.index
709 index = r.index
710 start = r.start
710 start = r.start
711 length = r.length
711 length = r.length
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 withsparseread = getattr(r, '_withsparseread', False)
713 withsparseread = getattr(r, '_withsparseread', False)
714
714
715 def revinfo(rev):
715 def revinfo(rev):
716 e = index[rev]
716 e = index[rev]
717 compsize = e[1]
717 compsize = e[1]
718 uncompsize = e[2]
718 uncompsize = e[2]
719 chainsize = 0
719 chainsize = 0
720
720
721 if generaldelta:
721 if generaldelta:
722 if e[3] == e[5]:
722 if e[3] == e[5]:
723 deltatype = b'p1'
723 deltatype = b'p1'
724 elif e[3] == e[6]:
724 elif e[3] == e[6]:
725 deltatype = b'p2'
725 deltatype = b'p2'
726 elif e[3] == rev - 1:
726 elif e[3] == rev - 1:
727 deltatype = b'prev'
727 deltatype = b'prev'
728 elif e[3] == rev:
728 elif e[3] == rev:
729 deltatype = b'base'
729 deltatype = b'base'
730 else:
730 else:
731 deltatype = b'other'
731 deltatype = b'other'
732 else:
732 else:
733 if e[3] == rev:
733 if e[3] == rev:
734 deltatype = b'base'
734 deltatype = b'base'
735 else:
735 else:
736 deltatype = b'prev'
736 deltatype = b'prev'
737
737
738 chain = r._deltachain(rev)[0]
738 chain = r._deltachain(rev)[0]
739 for iterrev in chain:
739 for iterrev in chain:
740 e = index[iterrev]
740 e = index[iterrev]
741 chainsize += e[1]
741 chainsize += e[1]
742
742
743 return compsize, uncompsize, deltatype, chain, chainsize
743 return compsize, uncompsize, deltatype, chain, chainsize
744
744
745 fm = ui.formatter(b'debugdeltachain', opts)
745 fm = ui.formatter(b'debugdeltachain', opts)
746
746
747 fm.plain(
747 fm.plain(
748 b' rev chain# chainlen prev delta '
748 b' rev chain# chainlen prev delta '
749 b'size rawsize chainsize ratio lindist extradist '
749 b'size rawsize chainsize ratio lindist extradist '
750 b'extraratio'
750 b'extraratio'
751 )
751 )
752 if withsparseread:
752 if withsparseread:
753 fm.plain(b' readsize largestblk rddensity srchunks')
753 fm.plain(b' readsize largestblk rddensity srchunks')
754 fm.plain(b'\n')
754 fm.plain(b'\n')
755
755
756 chainbases = {}
756 chainbases = {}
757 for rev in r:
757 for rev in r:
758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
759 chainbase = chain[0]
759 chainbase = chain[0]
760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
761 basestart = start(chainbase)
761 basestart = start(chainbase)
762 revstart = start(rev)
762 revstart = start(rev)
763 lineardist = revstart + comp - basestart
763 lineardist = revstart + comp - basestart
764 extradist = lineardist - chainsize
764 extradist = lineardist - chainsize
765 try:
765 try:
766 prevrev = chain[-2]
766 prevrev = chain[-2]
767 except IndexError:
767 except IndexError:
768 prevrev = -1
768 prevrev = -1
769
769
770 if uncomp != 0:
770 if uncomp != 0:
771 chainratio = float(chainsize) / float(uncomp)
771 chainratio = float(chainsize) / float(uncomp)
772 else:
772 else:
773 chainratio = chainsize
773 chainratio = chainsize
774
774
775 if chainsize != 0:
775 if chainsize != 0:
776 extraratio = float(extradist) / float(chainsize)
776 extraratio = float(extradist) / float(chainsize)
777 else:
777 else:
778 extraratio = extradist
778 extraratio = extradist
779
779
780 fm.startitem()
780 fm.startitem()
781 fm.write(
781 fm.write(
782 b'rev chainid chainlen prevrev deltatype compsize '
782 b'rev chainid chainlen prevrev deltatype compsize '
783 b'uncompsize chainsize chainratio lindist extradist '
783 b'uncompsize chainsize chainratio lindist extradist '
784 b'extraratio',
784 b'extraratio',
785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
786 rev,
786 rev,
787 chainid,
787 chainid,
788 len(chain),
788 len(chain),
789 prevrev,
789 prevrev,
790 deltatype,
790 deltatype,
791 comp,
791 comp,
792 uncomp,
792 uncomp,
793 chainsize,
793 chainsize,
794 chainratio,
794 chainratio,
795 lineardist,
795 lineardist,
796 extradist,
796 extradist,
797 extraratio,
797 extraratio,
798 rev=rev,
798 rev=rev,
799 chainid=chainid,
799 chainid=chainid,
800 chainlen=len(chain),
800 chainlen=len(chain),
801 prevrev=prevrev,
801 prevrev=prevrev,
802 deltatype=deltatype,
802 deltatype=deltatype,
803 compsize=comp,
803 compsize=comp,
804 uncompsize=uncomp,
804 uncompsize=uncomp,
805 chainsize=chainsize,
805 chainsize=chainsize,
806 chainratio=chainratio,
806 chainratio=chainratio,
807 lindist=lineardist,
807 lindist=lineardist,
808 extradist=extradist,
808 extradist=extradist,
809 extraratio=extraratio,
809 extraratio=extraratio,
810 )
810 )
811 if withsparseread:
811 if withsparseread:
812 readsize = 0
812 readsize = 0
813 largestblock = 0
813 largestblock = 0
814 srchunks = 0
814 srchunks = 0
815
815
816 for revschunk in deltautil.slicechunk(r, chain):
816 for revschunk in deltautil.slicechunk(r, chain):
817 srchunks += 1
817 srchunks += 1
818 blkend = start(revschunk[-1]) + length(revschunk[-1])
818 blkend = start(revschunk[-1]) + length(revschunk[-1])
819 blksize = blkend - start(revschunk[0])
819 blksize = blkend - start(revschunk[0])
820
820
821 readsize += blksize
821 readsize += blksize
822 if largestblock < blksize:
822 if largestblock < blksize:
823 largestblock = blksize
823 largestblock = blksize
824
824
825 if readsize:
825 if readsize:
826 readdensity = float(chainsize) / float(readsize)
826 readdensity = float(chainsize) / float(readsize)
827 else:
827 else:
828 readdensity = 1
828 readdensity = 1
829
829
830 fm.write(
830 fm.write(
831 b'readsize largestblock readdensity srchunks',
831 b'readsize largestblock readdensity srchunks',
832 b' %10d %10d %9.5f %8d',
832 b' %10d %10d %9.5f %8d',
833 readsize,
833 readsize,
834 largestblock,
834 largestblock,
835 readdensity,
835 readdensity,
836 srchunks,
836 srchunks,
837 readsize=readsize,
837 readsize=readsize,
838 largestblock=largestblock,
838 largestblock=largestblock,
839 readdensity=readdensity,
839 readdensity=readdensity,
840 srchunks=srchunks,
840 srchunks=srchunks,
841 )
841 )
842
842
843 fm.plain(b'\n')
843 fm.plain(b'\n')
844
844
845 fm.end()
845 fm.end()
846
846
847
847
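To make the ratio keywords documented in debugdeltachain concrete, here is a small standalone sketch of the same per-revision arithmetic the loop above performs (chainratio, extradist, extraratio, including the zero-size guards); the sample byte counts are illustrative assumptions:

def chain_metrics(uncompsize, chainsize, lineardist):
    # chainratio: compressed chain size relative to the uncompressed revision
    chainratio = float(chainsize) / float(uncompsize) if uncompsize else chainsize
    # extradist: bytes in the chain's on-disk span that belong to other chains
    extradist = lineardist - chainsize
    # extraratio: unrelated bytes crossed per byte of useful chain data
    extraratio = float(extradist) / float(chainsize) if chainsize else extradist
    return chainratio, extradist, extraratio

# e.g. a chain compressing to 1200 bytes inside a 1500-byte on-disk span,
# reconstructing a 600-byte revision:
print(chain_metrics(uncompsize=600, chainsize=1200, lineardist=1500))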
848 @command(
848 @command(
849 b'debugdirstate|debugstate',
849 b'debugdirstate|debugstate',
850 [
850 [
851 (
851 (
852 b'',
852 b'',
853 b'nodates',
853 b'nodates',
854 None,
854 None,
855 _(b'do not display the saved mtime (DEPRECATED)'),
855 _(b'do not display the saved mtime (DEPRECATED)'),
856 ),
856 ),
857 (b'', b'dates', True, _(b'display the saved mtime')),
857 (b'', b'dates', True, _(b'display the saved mtime')),
858 (b'', b'datesort', None, _(b'sort by saved mtime')),
858 (b'', b'datesort', None, _(b'sort by saved mtime')),
859 ],
859 ],
860 _(b'[OPTION]...'),
860 _(b'[OPTION]...'),
861 )
861 )
862 def debugstate(ui, repo, **opts):
862 def debugstate(ui, repo, **opts):
863 """show the contents of the current dirstate"""
863 """show the contents of the current dirstate"""
864
864
865 nodates = not opts['dates']
865 nodates = not opts['dates']
866 if opts.get('nodates') is not None:
866 if opts.get('nodates') is not None:
867 nodates = True
867 nodates = True
868 datesort = opts.get('datesort')
868 datesort = opts.get('datesort')
869
869
870 if datesort:
870 if datesort:
871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
872 else:
872 else:
873 keyfunc = None # sort by filename
873 keyfunc = None # sort by filename
874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
875 if ent[3] == -1:
875 if ent[3] == -1:
876 timestr = b'unset '
876 timestr = b'unset '
877 elif nodates:
877 elif nodates:
878 timestr = b'set '
878 timestr = b'set '
879 else:
879 else:
880 timestr = time.strftime(
880 timestr = time.strftime(
881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
882 )
882 )
883 timestr = encoding.strtolocal(timestr)
883 timestr = encoding.strtolocal(timestr)
884 if ent[1] & 0o20000:
884 if ent[1] & 0o20000:
885 mode = b'lnk'
885 mode = b'lnk'
886 else:
886 else:
887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 for f in repo.dirstate.copies():
889 for f in repo.dirstate.copies():
890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891
891
892
892
893 @command(
893 @command(
894 b'debugdiscovery',
894 b'debugdiscovery',
895 [
895 [
896 (b'', b'old', None, _(b'use old-style discovery')),
896 (b'', b'old', None, _(b'use old-style discovery')),
897 (
897 (
898 b'',
898 b'',
899 b'nonheads',
899 b'nonheads',
900 None,
900 None,
901 _(b'use old-style discovery with non-heads included'),
901 _(b'use old-style discovery with non-heads included'),
902 ),
902 ),
903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
904 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
904 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
905 ]
905 ]
906 + cmdutil.remoteopts,
906 + cmdutil.remoteopts,
907 _(b'[--rev REV] [OTHER]'),
907 _(b'[--rev REV] [OTHER]'),
908 )
908 )
909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
910 """runs the changeset discovery protocol in isolation"""
910 """runs the changeset discovery protocol in isolation"""
911 opts = pycompat.byteskwargs(opts)
911 opts = pycompat.byteskwargs(opts)
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 remote = hg.peer(repo, opts, remoteurl)
913 remote = hg.peer(repo, opts, remoteurl)
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915
915
916 # make sure tests are repeatable
916 # make sure tests are repeatable
917 random.seed(int(opts[b'seed']))
917 random.seed(int(opts[b'seed']))
918
918
919 if opts.get(b'old'):
919 if opts.get(b'old'):
920
920
921 def doit(pushedrevs, remoteheads, remote=remote):
921 def doit(pushedrevs, remoteheads, remote=remote):
922 if not util.safehasattr(remote, b'branches'):
922 if not util.safehasattr(remote, b'branches'):
923 # enable in-client legacy support
923 # enable in-client legacy support
924 remote = localrepo.locallegacypeer(remote.local())
924 remote = localrepo.locallegacypeer(remote.local())
925 common, _in, hds = treediscovery.findcommonincoming(
925 common, _in, hds = treediscovery.findcommonincoming(
926 repo, remote, force=True
926 repo, remote, force=True
927 )
927 )
928 common = set(common)
928 common = set(common)
929 if not opts.get(b'nonheads'):
929 if not opts.get(b'nonheads'):
930 ui.writenoi18n(
930 ui.writenoi18n(
931 b"unpruned common: %s\n"
931 b"unpruned common: %s\n"
932 % b" ".join(sorted(short(n) for n in common))
932 % b" ".join(sorted(short(n) for n in common))
933 )
933 )
934
934
935 clnode = repo.changelog.node
935 clnode = repo.changelog.node
936 common = repo.revs(b'heads(::%ln)', common)
936 common = repo.revs(b'heads(::%ln)', common)
937 common = {clnode(r) for r in common}
937 common = {clnode(r) for r in common}
938 return common, hds
938 return common, hds
939
939
940 else:
940 else:
941
941
942 def doit(pushedrevs, remoteheads, remote=remote):
942 def doit(pushedrevs, remoteheads, remote=remote):
943 nodes = None
943 nodes = None
944 if pushedrevs:
944 if pushedrevs:
945 revs = scmutil.revrange(repo, pushedrevs)
945 revs = scmutil.revrange(repo, pushedrevs)
946 nodes = [repo[r].node() for r in revs]
946 nodes = [repo[r].node() for r in revs]
947 common, any, hds = setdiscovery.findcommonheads(
947 common, any, hds = setdiscovery.findcommonheads(
948 ui, repo, remote, ancestorsof=nodes
948 ui, repo, remote, ancestorsof=nodes
949 )
949 )
950 return common, hds
950 return common, hds
951
951
    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
    ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )


_chunksize = 4 << 10


@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()


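# Example invocation (illustrative; the URL and output path are placeholders):
#
#   $ hg debugdownload https://www.mercurial-scm.org/ --output page.html
#
# Without --output the downloaded bytes are written to the ui instead of a
# file, per the `dest = ui` default above.
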
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()


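# Example invocations (illustrative); formatter options come from
# cmdutil.formatteropts, so templated output is available:
#
#   $ hg debugextensions -v
#   $ hg debugextensions -T json
#
# Verbose mode adds the location, bundled flag, tested-with versions and bug
# reporting link written by the fm.condwrite calls above.
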
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)


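# Example invocation (illustrative; the fileset expression is a placeholder):
#
#   $ hg debugfileset --show-stage all 'added() and size(">1k")'
#
# --show-stage all prints the parsed, analyzed and optimized trees defined in
# the `stages` list above before the matching files are listed.
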
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()


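# Example invocation (illustrative):
#
#   $ hg debugformat -v
#
# The verbose form adds the `config` and `default` columns emitted by the
# fm.condwrite(ui.verbose, ...) calls above next to the per-repository value.
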
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)


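# Example invocation (illustrative; the path is a placeholder):
#
#   $ hg debugfsinfo /path/to/some/directory
#
# With no argument the current directory is probed, per the `path=b"."`
# default above.
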
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)


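# Example invocation (illustrative; the repository URL, output file and node
# id are placeholders):
#
#   $ hg debuggetbundle http://example.com/repo bundle.hg \
#         -H 0123456789012345678901234567890123456789 -t bundle2
#
# Each -H/-C value must be a full 40-character hex node id, as noted in the
# docstring above.
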
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))


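# Example invocations (illustrative; file names are placeholders):
#
#   $ hg debugignore
#   $ hg debugignore build/output.o src/main.c
#
# The first form dumps the combined ignore matcher; the second reports, for
# each named file, whether it is ignored and which ignore rule matched it.
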
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()


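# Example invocations (illustrative; the file path is a placeholder):
#
#   $ hg debugindex -c            # changelog index
#   $ hg debugindex -m            # manifest index
#   $ hg debugindex path/to/file
#
# The -c/-m flags come from cmdutil.debugrevlogopts; the output columns are
# the rev, linkrev, nodeid and parent ids written by the formatter above.
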
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")


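# Example invocation (illustrative; file names are placeholders):
#
#   $ hg debugindexdot -c > changelog.dot
#   $ dot -Tpng changelog.dot -o changelog.png
#
# The command emits a graphviz digraph with one "parent -> child" edge per
# revision, which the graphviz `dot` tool can then render.
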
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))


@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        _(b" %s\n (check that your locale is properly set)\n"),
        err,
    )

    # Python
    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        os.path.dirname(pycompat.fsencode(os.__file__)),
    )

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        os.path.dirname(pycompat.fsencode(__file__)),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        m = templater.templatepath(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems


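# Example invocations (illustrative):
#
#   $ hg debuginstall
#   $ hg debuginstall -T json
#
# The function returns the number of problems found, so a clean installation
# reports "no problems detected" and returns 0.
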
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))


@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)


@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held


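# Example invocations (illustrative):
#
#   $ hg debuglocks              # report whether lock/wlock are held or free
#   $ hg debuglocks --set-wlock  # hold the working state lock until interrupted
#   $ hg debuglocks -L           # free a stuck store lock (DANGEROUS)
#
# As the docstring and option descriptions warn, force-freeing a lock that is
# legitimately held can corrupt repository data, so -L/-W should only be used
# on genuinely stale locks.
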
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read() # stores revision in cache too
            return

    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )


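# Example invocations (illustrative; the node id is a placeholder):
#
#   $ hg debugmanifestfulltextcache          # list cached manifest entries
#   $ hg debugmanifestfulltextcache --clear
#   $ hg debugmanifestfulltextcache --add 0123456789012345678901234567890123456789
#
# --add looks each node up in the manifest storage and reads it, which also
# populates the fulltext cache.
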
1920 @command(b'debugmergestate', [], b'')
1920 @command(b'debugmergestate', [], b'')
1921 def debugmergestate(ui, repo, *args):
1921 def debugmergestate(ui, repo, *args):
1922 """print merge state
1922 """print merge state
1923
1923
1924 Use --verbose to print out information about whether v1 or v2 merge state
1924 Use --verbose to print out information about whether v1 or v2 merge state
1925 was chosen."""
1925 was chosen."""
1926
1926
1927 def _hashornull(h):
1927 def _hashornull(h):
1928 if h == nullhex:
1928 if h == nullhex:
1929 return b'null'
1929 return b'null'
1930 else:
1930 else:
1931 return h
1931 return h
1932
1932
1933 def printrecords(version):
1933 def printrecords(version):
1934 ui.writenoi18n(b'* version %d records\n' % version)
1934 ui.writenoi18n(b'* version %d records\n' % version)
1935 if version == 1:
1935 if version == 1:
1936 records = v1records
1936 records = v1records
1937 else:
1937 else:
1938 records = v2records
1938 records = v2records
1939
1939
1940 for rtype, record in records:
        for rtype, record in records:
            # pretty print some record types
            if rtype == b'L':
                ui.writenoi18n(b'local: %s\n' % record)
            elif rtype == b'O':
                ui.writenoi18n(b'other: %s\n' % record)
            elif rtype == b'm':
                driver, mdstate = record.split(b'\0', 1)
                ui.writenoi18n(
                    b'merge driver: %s (state "%s")\n' % (driver, mdstate)
                )
            elif rtype in b'FDC':
                r = record.split(b'\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = b'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.writenoi18n(
                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                    % (f, rtype, state, _hashornull(hash))
                )
                ui.writenoi18n(
                    b' local path: %s (flags "%s")\n' % (lfile, flags)
                )
                ui.writenoi18n(
                    b' ancestor path: %s (node %s)\n'
                    % (afile, _hashornull(anode))
                )
                ui.writenoi18n(
                    b' other path: %s (node %s)\n'
                    % (ofile, _hashornull(onode))
                )
            elif rtype == b'f':
                filename, rawextras = record.split(b'\0', 1)
                extras = rawextras.split(b'\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.writenoi18n(
                    b'file extras: %s (%s)\n'
                    % (filename, b', '.join(extrastrings))
                )
            elif rtype == b'l':
                labels = record.split(b'\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.writenoi18n(b'labels:\n')
                ui.write((b' local: %s\n' % labels[0]))
                ui.write((b' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((b' base: %s\n' % labels[2]))
            else:
                ui.writenoi18n(
                    b'unrecognized entry: %s\t%s\n'
                    % (rtype, record.replace(b'\0', b'\t'))
                )

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = b'LOml'

    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)

    v1records.sort(key=key)
    v2records.sort(key=key)
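    # Added illustration (not part of the original source): record types that
    # appear in `order` sort first, in that order, and any other record type
    # sorts after them by its payload.  For example:
    #
    #   >>> sorted([(b'f', b'x'), (b'O', b'o'), (b'L', b'l')], key=key)
    #   [(b'L', b'l'), (b'O', b'o'), (b'f', b'x')]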

    if not v1records and not v2records:
        ui.writenoi18n(b'no merge state found\n')
    elif not v2records:
        ui.notenoi18n(b'no version 2 merge state\n')
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.notenoi18n(b'v1 and v2 states match: using v2\n')
        printrecords(2)
    else:
        ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
        printrecords(1)
        if ui.verbose:
            printrecords(2)


@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in pycompat.iteritems(repo.names):
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')


@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

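    # Added illustration (hypothetical values): parsenodeid() only accepts
    # full 40-character hexadecimal node identifiers, even for changesets the
    # local repository has never seen; anything shorter aborts.
    #
    #   parsenodeid(b'0123456789abcdef0123456789abcdef01234567')  # 20-byte node
    #   parsenodeid(b'0123456789abcdef')  # raises error.Abort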
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()


@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))


@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
2229 """dump copy information compared to p2"""
2229 """dump copy information compared to p2"""
2230
2230
2231 opts = pycompat.byteskwargs(opts)
2231 opts = pycompat.byteskwargs(opts)
2232 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2232 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2233 for dst, src in ctx.p2copies().items():
2233 for dst, src in ctx.p2copies().items():
2234 ui.write(b'%s -> %s\n' % (src, dst))
2234 ui.write(b'%s -> %s\n' % (src, dst))
2235
2235
2236
2236
2237 @command(
2237 @command(
2238 b'debugpathcomplete',
2238 b'debugpathcomplete',
2239 [
2239 [
2240 (b'f', b'full', None, _(b'complete an entire path')),
2240 (b'f', b'full', None, _(b'complete an entire path')),
2241 (b'n', b'normal', None, _(b'show only normal files')),
2241 (b'n', b'normal', None, _(b'show only normal files')),
2242 (b'a', b'added', None, _(b'show only added files')),
2242 (b'a', b'added', None, _(b'show only added files')),
2243 (b'r', b'removed', None, _(b'show only removed files')),
2243 (b'r', b'removed', None, _(b'show only removed files')),
2244 ],
2244 ],
2245 _(b'FILESPEC...'),
2245 _(b'FILESPEC...'),
2246 )
2246 )
2247 def debugpathcomplete(ui, repo, *specs, **opts):
2247 def debugpathcomplete(ui, repo, *specs, **opts):
2248 '''complete part or all of a tracked path
2248 '''complete part or all of a tracked path
2249
2249
2250 This command supports shells that offer path name completion. It
2250 This command supports shells that offer path name completion. It
2251 currently completes only files already known to the dirstate.
2251 currently completes only files already known to the dirstate.
2252
2252
2253 Completion extends only to the next path segment unless
2253 Completion extends only to the next path segment unless
2254 --full is specified, in which case entire paths are used.'''
2254 --full is specified, in which case entire paths are used.'''
2255
2255
2256 def complete(path, acceptable):
2256 def complete(path, acceptable):
2257 dirstate = repo.dirstate
2257 dirstate = repo.dirstate
2258 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2258 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2259 rootdir = repo.root + pycompat.ossep
2259 rootdir = repo.root + pycompat.ossep
2260 if spec != repo.root and not spec.startswith(rootdir):
2260 if spec != repo.root and not spec.startswith(rootdir):
2261 return [], []
2261 return [], []
2262 if os.path.isdir(spec):
2262 if os.path.isdir(spec):
2263 spec += b'/'
2263 spec += b'/'
2264 spec = spec[len(rootdir) :]
2264 spec = spec[len(rootdir) :]
2265 fixpaths = pycompat.ossep != b'/'
2265 fixpaths = pycompat.ossep != b'/'
2266 if fixpaths:
2266 if fixpaths:
2267 spec = spec.replace(pycompat.ossep, b'/')
2267 spec = spec.replace(pycompat.ossep, b'/')
2268 speclen = len(spec)
2268 speclen = len(spec)
2269 fullpaths = opts['full']
2269 fullpaths = opts['full']
2270 files, dirs = set(), set()
2270 files, dirs = set(), set()
2271 adddir, addfile = dirs.add, files.add
2271 adddir, addfile = dirs.add, files.add
2272 for f, st in pycompat.iteritems(dirstate):
2272 for f, st in pycompat.iteritems(dirstate):
2273 if f.startswith(spec) and st[0] in acceptable:
2273 if f.startswith(spec) and st[0] in acceptable:
2274 if fixpaths:
2274 if fixpaths:
2275 f = f.replace(b'/', pycompat.ossep)
2275 f = f.replace(b'/', pycompat.ossep)
2276 if fullpaths:
2276 if fullpaths:
2277 addfile(f)
2277 addfile(f)
2278 continue
2278 continue
2279 s = f.find(pycompat.ossep, speclen)
2279 s = f.find(pycompat.ossep, speclen)
2280 if s >= 0:
2280 if s >= 0:
2281 adddir(f[:s])
2281 adddir(f[:s])
2282 else:
2282 else:
2283 addfile(f)
2283 addfile(f)
2284 return files, dirs
2284 return files, dirs
2285
2285
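    # Added note: the characters collected in `acceptable` below are dirstate
    # status codes -- 'n' (normal) and 'm' (needs merging) for --normal, 'a'
    # for --added, 'r' for --removed.  An empty selection falls back to
    # b'nmar', i.e. every tracked file.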
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')


@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))


@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))


@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
2354 """examine which merge tool is chosen for specified file
2354 """examine which merge tool is chosen for specified file
2355
2355
2356 As described in :hg:`help merge-tools`, Mercurial examines
2356 As described in :hg:`help merge-tools`, Mercurial examines
2357 configurations below in this order to decide which merge tool is
2357 configurations below in this order to decide which merge tool is
2358 chosen for specified file.
2358 chosen for specified file.
2359
2359
2360 1. ``--tool`` option
2360 1. ``--tool`` option
2361 2. ``HGMERGE`` environment variable
2361 2. ``HGMERGE`` environment variable
2362 3. configurations in ``merge-patterns`` section
2362 3. configurations in ``merge-patterns`` section
2363 4. configuration of ``ui.merge``
2363 4. configuration of ``ui.merge``
2364 5. configurations in ``merge-tools`` section
2364 5. configurations in ``merge-tools`` section
2365 6. ``hgmerge`` tool (for historical reason only)
2365 6. ``hgmerge`` tool (for historical reason only)
2366 7. default tool for fallback (``:merge`` or ``:prompt``)
2366 7. default tool for fallback (``:merge`` or ``:prompt``)
2367
2367
2368 This command writes out examination result in the style below::
2368 This command writes out examination result in the style below::
2369
2369
2370 FILE = MERGETOOL
2370 FILE = MERGETOOL
2371
2371
2372 By default, all files known in the first parent context of the
2372 By default, all files known in the first parent context of the
2373 working directory are examined. Use file patterns and/or -I/-X
2373 working directory are examined. Use file patterns and/or -I/-X
2374 options to limit target files. -r/--rev is also useful to examine
2374 options to limit target files. -r/--rev is also useful to examine
2375 files in another context without actual updating to it.
2375 files in another context without actual updating to it.
2376
2376
2377 With --debug, this command shows warning messages while matching
2377 With --debug, this command shows warning messages while matching
2378 against ``merge-patterns`` and so on, too. It is recommended to
2378 against ``merge-patterns`` and so on, too. It is recommended to
2379 use this option with explicit file patterns and/or -I/-X options,
2379 use this option with explicit file patterns and/or -I/-X options,
2380 because this option increases amount of output per file according
2380 because this option increases amount of output per file according
2381 to configurations in hgrc.
2381 to configurations in hgrc.
2382
2382
2383 With -v/--verbose, this command shows configurations below at
2383 With -v/--verbose, this command shows configurations below at
2384 first (only if specified).
2384 first (only if specified).
2385
2385
2386 - ``--tool`` option
2386 - ``--tool`` option
2387 - ``HGMERGE`` environment variable
2387 - ``HGMERGE`` environment variable
2388 - configuration of ``ui.merge``
2388 - configuration of ``ui.merge``
2389
2389
2390 If merge tool is chosen before matching against
2390 If merge tool is chosen before matching against
2391 ``merge-patterns``, this command can't show any helpful
2391 ``merge-patterns``, this command can't show any helpful
2392 information, even with --debug. In such case, information above is
2392 information, even with --debug. In such case, information above is
2393 useful to know why a merge tool is chosen.
2393 useful to know why a merge tool is chosen.
2394 """
2394 """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))

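# Added illustration of debugpickmergetool (hypothetical file name and
# configuration, not part of the original source); output follows the
# ``FILE = MERGETOOL`` style described in the docstring above:
#
#   $ hg debugpickmergetool --rev . --tool internal:merge3 foo.txt
#   foo.txt = internal:merge3
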
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        return not r
    else:
        for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )

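# Added illustration of debugpushkey (hypothetical path; 'namespaces' and
# 'bookmarks' are standard pushkey namespaces); the two-argument form lists
# key/value pairs, one per line:
#
#   $ hg debugpushkey /path/to/repo namespaces
#   $ hg debugpushkey /path/to/repo bookmarks
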
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )


@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
            changedfiles = manifestonly | dsnotadded
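            # Added illustration (hypothetical file names): if the manifest
            # has {a, b} and the dirstate has {b, c, d} with c marked 'a'
            # (added), then manifestonly = {a}, dsonly = {c, d},
            # dsnotadded = {d} and changedfiles = {a, d}.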

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)


@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)


@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)


@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about the delta chain of each rev
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

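    # Added illustration: addsize() keeps [min, max, total] up to date in
    # place, e.g.
    #
    #   sizes = [None, 0, 0]
    #   for s in (3, 1, 2):
    #       addsize(s, sizes)
    #   # sizes is now [1, 3, 6]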
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

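    # Added illustration: pcfmt() pairs a value with its percentage of the
    # total, e.g. pcfmt(25, 100) == (25, 25.0) and pcfmt(7, 0) == (7, 100.0).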
    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

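    # Added note: revlog chunks start with a compression marker byte, so for
    # example fmtchunktype(b'u') (an uncompressed chunk) renders as
    # "0x75 (u)" and fmtchunktype(b'x') (a zlib chunk) renders as "0x78 (x)".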
2852 ui.write(b'\n')
2852 ui.write(b'\n')
2853 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2853 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2854 for chunktype in sorted(chunktypecounts):
2854 for chunktype in sorted(chunktypecounts):
2855 ui.write(fmtchunktype(chunktype))
2855 ui.write(fmtchunktype(chunktype))
2856 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2856 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2857 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2857 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2858 for chunktype in sorted(chunktypecounts):
2858 for chunktype in sorted(chunktypecounts):
2859 ui.write(fmtchunktype(chunktype))
2859 ui.write(fmtchunktype(chunktype))
2860 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2860 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2861
2861
2862 ui.write(b'\n')
2862 ui.write(b'\n')
2863 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2863 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2864 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2864 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2865 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2865 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2866 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2866 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2867 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2867 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2868
2868
2869 if format > 0:
2869 if format > 0:
2870 ui.write(b'\n')
2870 ui.write(b'\n')
2871 ui.writenoi18n(
2871 ui.writenoi18n(
2872 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2872 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2873 % tuple(datasize)
2873 % tuple(datasize)
2874 )
2874 )
2875 ui.writenoi18n(
2875 ui.writenoi18n(
2876 b'full revision size (min/max/avg) : %d / %d / %d\n'
2876 b'full revision size (min/max/avg) : %d / %d / %d\n'
2877 % tuple(fullsize)
2877 % tuple(fullsize)
2878 )
2878 )
2879 ui.writenoi18n(
2879 ui.writenoi18n(
2880 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2880 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2881 % tuple(semisize)
2881 % tuple(semisize)
2882 )
2882 )
2883 for depth in sorted(snapsizedepth):
2883 for depth in sorted(snapsizedepth):
2884 if depth == 0:
2884 if depth == 0:
2885 continue
2885 continue
2886 ui.writenoi18n(
2886 ui.writenoi18n(
2887 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2887 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2888 % ((depth,) + tuple(snapsizedepth[depth]))
2888 % ((depth,) + tuple(snapsizedepth[depth]))
2889 )
2889 )
2890 ui.writenoi18n(
2890 ui.writenoi18n(
2891 b'delta size (min/max/avg) : %d / %d / %d\n'
2891 b'delta size (min/max/avg) : %d / %d / %d\n'
2892 % tuple(deltasize)
2892 % tuple(deltasize)
2893 )
2893 )
2894
2894
2895 if numdeltas > 0:
2895 if numdeltas > 0:
2896 ui.write(b'\n')
2896 ui.write(b'\n')
2897 fmt = pcfmtstr(numdeltas)
2897 fmt = pcfmtstr(numdeltas)
2898 fmt2 = pcfmtstr(numdeltas, 4)
2898 fmt2 = pcfmtstr(numdeltas, 4)
2899 ui.writenoi18n(
2899 ui.writenoi18n(
2900 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2900 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2901 )
2901 )
2902 if numprev > 0:
2902 if numprev > 0:
2903 ui.writenoi18n(
2903 ui.writenoi18n(
2904 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2904 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2905 )
2905 )
2906 ui.writenoi18n(
2906 ui.writenoi18n(
2907 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2907 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2908 )
2908 )
2909 ui.writenoi18n(
2909 ui.writenoi18n(
2910 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2910 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2911 )
2911 )
2912 if gdelta:
2912 if gdelta:
2913 ui.writenoi18n(
2913 ui.writenoi18n(
2914 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2914 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2915 )
2915 )
2916 ui.writenoi18n(
2916 ui.writenoi18n(
2917 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2917 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2918 )
2918 )
2919 ui.writenoi18n(
2919 ui.writenoi18n(
2920 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2920 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2921 )
2921 )
2922
2922
2923
2923
2924 @command(
2924 @command(
2925 b'debugrevlogindex',
2925 b'debugrevlogindex',
2926 cmdutil.debugrevlogopts
2926 cmdutil.debugrevlogopts
2927 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2927 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2928 _(b'[-f FORMAT] -c|-m|FILE'),
2928 _(b'[-f FORMAT] -c|-m|FILE'),
2929 optionalrepo=True,
2929 optionalrepo=True,
2930 )
2930 )
2931 def debugrevlogindex(ui, repo, file_=None, **opts):
2931 def debugrevlogindex(ui, repo, file_=None, **opts):
2932 """dump the contents of a revlog index"""
2932 """dump the contents of a revlog index"""
2933 opts = pycompat.byteskwargs(opts)
2933 opts = pycompat.byteskwargs(opts)
2934 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2934 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2935 format = opts.get(b'format', 0)
2935 format = opts.get(b'format', 0)
2936 if format not in (0, 1):
2936 if format not in (0, 1):
2937 raise error.Abort(_(b"unknown format %d") % format)
2937 raise error.Abort(_(b"unknown format %d") % format)
2938
2938
2939 if ui.debugflag:
2939 if ui.debugflag:
2940 shortfn = hex
2940 shortfn = hex
2941 else:
2941 else:
2942 shortfn = short
2942 shortfn = short
2943
2943
2944 # There might not be anything in r, so have a sane default
2944 # There might not be anything in r, so have a sane default
2945 idlen = 12
2945 idlen = 12
2946 for i in r:
2946 for i in r:
2947 idlen = len(shortfn(r.node(i)))
2947 idlen = len(shortfn(r.node(i)))
2948 break
2948 break
2949
2949
2950 if format == 0:
2950 if format == 0:
2951 if ui.verbose:
2951 if ui.verbose:
2952 ui.writenoi18n(
2952 ui.writenoi18n(
2953 b" rev offset length linkrev %s %s p2\n"
2953 b" rev offset length linkrev %s %s p2\n"
2954 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2954 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2955 )
2955 )
2956 else:
2956 else:
2957 ui.writenoi18n(
2957 ui.writenoi18n(
2958 b" rev linkrev %s %s p2\n"
2958 b" rev linkrev %s %s p2\n"
2959 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2959 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2960 )
2960 )
2961 elif format == 1:
2961 elif format == 1:
2962 if ui.verbose:
2962 if ui.verbose:
2963 ui.writenoi18n(
2963 ui.writenoi18n(
2964 (
2964 (
2965 b" rev flag offset length size link p1"
2965 b" rev flag offset length size link p1"
2966 b" p2 %s\n"
2966 b" p2 %s\n"
2967 )
2967 )
2968 % b"nodeid".rjust(idlen)
2968 % b"nodeid".rjust(idlen)
2969 )
2969 )
2970 else:
2970 else:
2971 ui.writenoi18n(
2971 ui.writenoi18n(
2972 b" rev flag size link p1 p2 %s\n"
2972 b" rev flag size link p1 p2 %s\n"
2973 % b"nodeid".rjust(idlen)
2973 % b"nodeid".rjust(idlen)
2974 )
2974 )
2975
2975
2976 for i in r:
2976 for i in r:
2977 node = r.node(i)
2977 node = r.node(i)
2978 if format == 0:
2978 if format == 0:
2979 try:
2979 try:
2980 pp = r.parents(node)
2980 pp = r.parents(node)
2981 except Exception:
2981 except Exception:
2982 pp = [nullid, nullid]
2982 pp = [nullid, nullid]
2983 if ui.verbose:
2983 if ui.verbose:
2984 ui.write(
2984 ui.write(
2985 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2985 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2986 % (
2986 % (
2987 i,
2987 i,
2988 r.start(i),
2988 r.start(i),
2989 r.length(i),
2989 r.length(i),
2990 r.linkrev(i),
2990 r.linkrev(i),
2991 shortfn(node),
2991 shortfn(node),
2992 shortfn(pp[0]),
2992 shortfn(pp[0]),
2993 shortfn(pp[1]),
2993 shortfn(pp[1]),
2994 )
2994 )
2995 )
2995 )
2996 else:
2996 else:
2997 ui.write(
2997 ui.write(
2998 b"% 6d % 7d %s %s %s\n"
2998 b"% 6d % 7d %s %s %s\n"
2999 % (
2999 % (
3000 i,
3000 i,
3001 r.linkrev(i),
3001 r.linkrev(i),
3002 shortfn(node),
3002 shortfn(node),
3003 shortfn(pp[0]),
3003 shortfn(pp[0]),
3004 shortfn(pp[1]),
3004 shortfn(pp[1]),
3005 )
3005 )
3006 )
3006 )
3007 elif format == 1:
3007 elif format == 1:
3008 pr = r.parentrevs(i)
3008 pr = r.parentrevs(i)
3009 if ui.verbose:
3009 if ui.verbose:
3010 ui.write(
3010 ui.write(
3011 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3011 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3012 % (
3012 % (
3013 i,
3013 i,
3014 r.flags(i),
3014 r.flags(i),
3015 r.start(i),
3015 r.start(i),
3016 r.length(i),
3016 r.length(i),
3017 r.rawsize(i),
3017 r.rawsize(i),
3018 r.linkrev(i),
3018 r.linkrev(i),
3019 pr[0],
3019 pr[0],
3020 pr[1],
3020 pr[1],
3021 shortfn(node),
3021 shortfn(node),
3022 )
3022 )
3023 )
3023 )
3024 else:
3024 else:
3025 ui.write(
3025 ui.write(
3026 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3026 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3027 % (
3027 % (
3028 i,
3028 i,
3029 r.flags(i),
3029 r.flags(i),
3030 r.rawsize(i),
3030 r.rawsize(i),
3031 r.linkrev(i),
3031 r.linkrev(i),
3032 pr[0],
3032 pr[0],
3033 pr[1],
3033 pr[1],
3034 shortfn(node),
3034 shortfn(node),
3035 )
3035 )
3036 )
3036 )
3037
3037
3038
3038
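# Illustrative only: with made-up values, the non-verbose format 0 row
# template used above renders fixed-width columns for rev and linkrev,
# followed by nodeid, p1 and p2 (the variable name is illustrative):
_demo_row = b"% 6d % 7d %s %s %s\n" % (
    0,
    42,
    b"1234567890ab",
    b"000000000000",
    b"000000000000",
)
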
3039 @command(
3039 @command(
3040 b'debugrevspec',
3040 b'debugrevspec',
3041 [
3041 [
3042 (
3042 (
3043 b'',
3043 b'',
3044 b'optimize',
3044 b'optimize',
3045 None,
3045 None,
3046 _(b'print parsed tree after optimizing (DEPRECATED)'),
3046 _(b'print parsed tree after optimizing (DEPRECATED)'),
3047 ),
3047 ),
3048 (
3048 (
3049 b'',
3049 b'',
3050 b'show-revs',
3050 b'show-revs',
3051 True,
3051 True,
3052 _(b'print list of result revisions (default)'),
3052 _(b'print list of result revisions (default)'),
3053 ),
3053 ),
3054 (
3054 (
3055 b's',
3055 b's',
3056 b'show-set',
3056 b'show-set',
3057 None,
3057 None,
3058 _(b'print internal representation of result set'),
3058 _(b'print internal representation of result set'),
3059 ),
3059 ),
3060 (
3060 (
3061 b'p',
3061 b'p',
3062 b'show-stage',
3062 b'show-stage',
3063 [],
3063 [],
3064 _(b'print parsed tree at the given stage'),
3064 _(b'print parsed tree at the given stage'),
3065 _(b'NAME'),
3065 _(b'NAME'),
3066 ),
3066 ),
3067 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3067 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3068 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3068 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3069 ],
3069 ],
3070 b'REVSPEC',
3070 b'REVSPEC',
3071 )
3071 )
3072 def debugrevspec(ui, repo, expr, **opts):
3072 def debugrevspec(ui, repo, expr, **opts):
3073 """parse and apply a revision specification
3073 """parse and apply a revision specification
3074
3074
3075 Use -p/--show-stage option to print the parsed tree at the given stages.
3075 Use -p/--show-stage option to print the parsed tree at the given stages.
3076 Use -p all to print the tree at every stage.
3076 Use -p all to print the tree at every stage.
3077
3077
3078 Use --no-show-revs option with -s or -p to print only the set
3078 Use --no-show-revs option with -s or -p to print only the set
3079 representation or the parsed tree respectively.
3079 representation or the parsed tree respectively.
3080
3080
3081 Use --verify-optimized to compare the optimized result with the unoptimized
3081 Use --verify-optimized to compare the optimized result with the unoptimized
3082 one. Returns 1 if the optimized result differs.
3082 one. Returns 1 if the optimized result differs.
3083 """
3083 """
3084 opts = pycompat.byteskwargs(opts)
3084 opts = pycompat.byteskwargs(opts)
3085 aliases = ui.configitems(b'revsetalias')
3085 aliases = ui.configitems(b'revsetalias')
3086 stages = [
3086 stages = [
3087 (b'parsed', lambda tree: tree),
3087 (b'parsed', lambda tree: tree),
3088 (
3088 (
3089 b'expanded',
3089 b'expanded',
3090 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3090 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3091 ),
3091 ),
3092 (b'concatenated', revsetlang.foldconcat),
3092 (b'concatenated', revsetlang.foldconcat),
3093 (b'analyzed', revsetlang.analyze),
3093 (b'analyzed', revsetlang.analyze),
3094 (b'optimized', revsetlang.optimize),
3094 (b'optimized', revsetlang.optimize),
3095 ]
3095 ]
3096 if opts[b'no_optimized']:
3096 if opts[b'no_optimized']:
3097 stages = stages[:-1]
3097 stages = stages[:-1]
3098 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3098 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3099 raise error.Abort(
3099 raise error.Abort(
3100 _(b'cannot use --verify-optimized with --no-optimized')
3100 _(b'cannot use --verify-optimized with --no-optimized')
3101 )
3101 )
3102 stagenames = set(n for n, f in stages)
3102 stagenames = set(n for n, f in stages)
3103
3103
3104 showalways = set()
3104 showalways = set()
3105 showchanged = set()
3105 showchanged = set()
3106 if ui.verbose and not opts[b'show_stage']:
3106 if ui.verbose and not opts[b'show_stage']:
3107 # show parsed tree by --verbose (deprecated)
3107 # show parsed tree by --verbose (deprecated)
3108 showalways.add(b'parsed')
3108 showalways.add(b'parsed')
3109 showchanged.update([b'expanded', b'concatenated'])
3109 showchanged.update([b'expanded', b'concatenated'])
3110 if opts[b'optimize']:
3110 if opts[b'optimize']:
3111 showalways.add(b'optimized')
3111 showalways.add(b'optimized')
3112 if opts[b'show_stage'] and opts[b'optimize']:
3112 if opts[b'show_stage'] and opts[b'optimize']:
3113 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3113 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3114 if opts[b'show_stage'] == [b'all']:
3114 if opts[b'show_stage'] == [b'all']:
3115 showalways.update(stagenames)
3115 showalways.update(stagenames)
3116 else:
3116 else:
3117 for n in opts[b'show_stage']:
3117 for n in opts[b'show_stage']:
3118 if n not in stagenames:
3118 if n not in stagenames:
3119 raise error.Abort(_(b'invalid stage name: %s') % n)
3119 raise error.Abort(_(b'invalid stage name: %s') % n)
3120 showalways.update(opts[b'show_stage'])
3120 showalways.update(opts[b'show_stage'])
3121
3121
3122 treebystage = {}
3122 treebystage = {}
3123 printedtree = None
3123 printedtree = None
3124 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3124 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3125 for n, f in stages:
3125 for n, f in stages:
3126 treebystage[n] = tree = f(tree)
3126 treebystage[n] = tree = f(tree)
3127 if n in showalways or (n in showchanged and tree != printedtree):
3127 if n in showalways or (n in showchanged and tree != printedtree):
3128 if opts[b'show_stage'] or n != b'parsed':
3128 if opts[b'show_stage'] or n != b'parsed':
3129 ui.write(b"* %s:\n" % n)
3129 ui.write(b"* %s:\n" % n)
3130 ui.write(revsetlang.prettyformat(tree), b"\n")
3130 ui.write(revsetlang.prettyformat(tree), b"\n")
3131 printedtree = tree
3131 printedtree = tree
3132
3132
3133 if opts[b'verify_optimized']:
3133 if opts[b'verify_optimized']:
3134 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3134 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3135 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3135 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3136 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3136 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3137 ui.writenoi18n(
3137 ui.writenoi18n(
3138 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3138 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3139 )
3139 )
3140 ui.writenoi18n(
3140 ui.writenoi18n(
3141 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3141 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3142 )
3142 )
3143 arevs = list(arevs)
3143 arevs = list(arevs)
3144 brevs = list(brevs)
3144 brevs = list(brevs)
3145 if arevs == brevs:
3145 if arevs == brevs:
3146 return 0
3146 return 0
3147 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3147 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3148 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3148 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3149 sm = difflib.SequenceMatcher(None, arevs, brevs)
3149 sm = difflib.SequenceMatcher(None, arevs, brevs)
3150 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3150 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3151 if tag in ('delete', 'replace'):
3151 if tag in ('delete', 'replace'):
3152 for c in arevs[alo:ahi]:
3152 for c in arevs[alo:ahi]:
3153 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3153 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3154 if tag in ('insert', 'replace'):
3154 if tag in ('insert', 'replace'):
3155 for c in brevs[blo:bhi]:
3155 for c in brevs[blo:bhi]:
3156 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3156 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3157 if tag == 'equal':
3157 if tag == 'equal':
3158 for c in arevs[alo:ahi]:
3158 for c in arevs[alo:ahi]:
3159 ui.write(b' %d\n' % c)
3159 ui.write(b' %d\n' % c)
3160 return 1
3160 return 1
3161
3161
3162 func = revset.makematcher(tree)
3162 func = revset.makematcher(tree)
3163 revs = func(repo)
3163 revs = func(repo)
3164 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3164 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3165 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3165 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3166 if not opts[b'show_revs']:
3166 if not opts[b'show_revs']:
3167 return
3167 return
3168 for c in revs:
3168 for c in revs:
3169 ui.write(b"%d\n" % c)
3169 ui.write(b"%d\n" % c)
3170
3170
3171
3171
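# The --verify-optimized branch above renders the difference between two
# revision lists with difflib.SequenceMatcher opcodes; the same pattern as a
# small self-contained sketch (function name is illustrative):
import difflib

def _diff_revlists_sketch(arevs, brevs):
    out = []
    sm = difflib.SequenceMatcher(None, arevs, brevs)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            out.extend('-%d' % c for c in arevs[alo:ahi])
        if tag in ('insert', 'replace'):
            out.extend('+%d' % c for c in brevs[blo:bhi])
        if tag == 'equal':
            out.extend(' %d' % c for c in arevs[alo:ahi])
    return out
# _diff_revlists_sketch([1, 2, 3], [1, 3, 4]) -> [' 1', '-2', ' 3', '+4']
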
3172 @command(
3172 @command(
3173 b'debugserve',
3173 b'debugserve',
3174 [
3174 [
3175 (
3175 (
3176 b'',
3176 b'',
3177 b'sshstdio',
3177 b'sshstdio',
3178 False,
3178 False,
3179 _(b'run an SSH server bound to process handles'),
3179 _(b'run an SSH server bound to process handles'),
3180 ),
3180 ),
3181 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3181 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3182 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3182 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3183 ],
3183 ],
3184 b'',
3184 b'',
3185 )
3185 )
3186 def debugserve(ui, repo, **opts):
3186 def debugserve(ui, repo, **opts):
3187 """run a server with advanced settings
3187 """run a server with advanced settings
3188
3188
3189 This command is similar to :hg:`serve`. It exists partially as a
3189 This command is similar to :hg:`serve`. It exists partially as a
3190 workaround to the fact that ``hg serve --stdio`` must have specific
3190 workaround to the fact that ``hg serve --stdio`` must have specific
3191 arguments for security reasons.
3191 arguments for security reasons.
3192 """
3192 """
3193 opts = pycompat.byteskwargs(opts)
3193 opts = pycompat.byteskwargs(opts)
3194
3194
3195 if not opts[b'sshstdio']:
3195 if not opts[b'sshstdio']:
3196 raise error.Abort(_(b'only --sshstdio is currently supported'))
3196 raise error.Abort(_(b'only --sshstdio is currently supported'))
3197
3197
3198 logfh = None
3198 logfh = None
3199
3199
3200 if opts[b'logiofd'] and opts[b'logiofile']:
3200 if opts[b'logiofd'] and opts[b'logiofile']:
3201 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3201 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3202
3202
3203 if opts[b'logiofd']:
3203 if opts[b'logiofd']:
3204 # Line buffered because output is line based.
3204 # Line buffered because output is line based.
3205 try:
3205 try:
3206 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3206 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3207 except OSError as e:
3207 except OSError as e:
3208 if e.errno != errno.ESPIPE:
3208 if e.errno != errno.ESPIPE:
3209 raise
3209 raise
3210 # can't seek a pipe, so `ab` mode fails on py3
3210 # can't seek a pipe, so `ab` mode fails on py3
3211 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3211 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3212 elif opts[b'logiofile']:
3212 elif opts[b'logiofile']:
3213 logfh = open(opts[b'logiofile'], b'ab', 1)
3213 logfh = open(opts[b'logiofile'], b'ab', 1)
3214
3214
3215 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3215 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3216 s.serve_forever()
3216 s.serve_forever()
3217
3217
3218
3218
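# The --logiofd handling above falls back from append mode to plain write
# mode when the descriptor is a pipe (pipes cannot seek, so 'ab' fails with
# ESPIPE on Python 3). The same pattern in isolation, for an already-open
# descriptor (function name is illustrative):
import errno
import os

def _open_logfd_sketch(fd):
    try:
        return os.fdopen(fd, 'ab', 1)  # line buffered, append
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        return os.fdopen(fd, 'wb', 1)  # cannot seek a pipe; use write mode
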
3219 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3219 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3220 def debugsetparents(ui, repo, rev1, rev2=None):
3220 def debugsetparents(ui, repo, rev1, rev2=None):
3221 """manually set the parents of the current working directory
3221 """manually set the parents of the current working directory
3222
3222
3223 This is useful for writing repository conversion tools, but should
3223 This is useful for writing repository conversion tools, but should
3224 be used with care. For example, neither the working directory nor the
3224 be used with care. For example, neither the working directory nor the
3225 dirstate is updated, so file status may be incorrect after running this
3225 dirstate is updated, so file status may be incorrect after running this
3226 command.
3226 command.
3227
3227
3228 Returns 0 on success.
3228 Returns 0 on success.
3229 """
3229 """
3230
3230
3231 node1 = scmutil.revsingle(repo, rev1).node()
3231 node1 = scmutil.revsingle(repo, rev1).node()
3232 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3232 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3233
3233
3234 with repo.wlock():
3234 with repo.wlock():
3235 repo.setparents(node1, node2)
3235 repo.setparents(node1, node2)
3236
3236
3237
3237
3238 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3238 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3239 def debugsidedata(ui, repo, file_, rev=None, **opts):
3239 def debugsidedata(ui, repo, file_, rev=None, **opts):
3240 """dump the side data for a cl/manifest/file revision
3240 """dump the side data for a cl/manifest/file revision
3241
3241
3242 Use --verbose to dump the sidedata content."""
3242 Use --verbose to dump the sidedata content."""
3243 opts = pycompat.byteskwargs(opts)
3243 opts = pycompat.byteskwargs(opts)
3244 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3244 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3245 if rev is not None:
3245 if rev is not None:
3246 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3246 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3247 file_, rev = None, file_
3247 file_, rev = None, file_
3248 elif rev is None:
3248 elif rev is None:
3249 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3249 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3250 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3250 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3251 r = getattr(r, '_revlog', r)
3251 r = getattr(r, '_revlog', r)
3252 try:
3252 try:
3253 sidedata = r.sidedata(r.lookup(rev))
3253 sidedata = r.sidedata(r.lookup(rev))
3254 except KeyError:
3254 except KeyError:
3255 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3255 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3256 if sidedata:
3256 if sidedata:
3257 sidedata = list(sidedata.items())
3257 sidedata = list(sidedata.items())
3258 sidedata.sort()
3258 sidedata.sort()
3259 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3259 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3260 for key, value in sidedata:
3260 for key, value in sidedata:
3261 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3261 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3262 if ui.verbose:
3262 if ui.verbose:
3263 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3263 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3264
3264
3265
3265
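# Sketch of the listing above with dummy data: sidedata entries are sorted by
# key and printed with their size (keys rendered in octal, as in the code;
# the variable name is illustrative):
_sidedata_demo = {2: b'abc', 1: b'xy'}
for _key, _value in sorted(_sidedata_demo.items()):
    print('entry-%04o size %d' % (_key, len(_value)))
# entry-0001 size 2
# entry-0002 size 3
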
3266 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3266 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3267 def debugssl(ui, repo, source=None, **opts):
3267 def debugssl(ui, repo, source=None, **opts):
3268 '''test a secure connection to a server
3268 '''test a secure connection to a server
3269
3269
3270 This builds the certificate chain for the server on Windows, installing the
3270 This builds the certificate chain for the server on Windows, installing the
3271 missing intermediates and trusted root via Windows Update if necessary. It
3271 missing intermediates and trusted root via Windows Update if necessary. It
3272 does nothing on other platforms.
3272 does nothing on other platforms.
3273
3273
3274 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3274 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3275 that server is used. See :hg:`help urls` for more information.
3275 that server is used. See :hg:`help urls` for more information.
3276
3276
3277 If the update succeeds, retry the original operation. Otherwise, the cause
3277 If the update succeeds, retry the original operation. Otherwise, the cause
3278 of the SSL error is likely another issue.
3278 of the SSL error is likely another issue.
3279 '''
3279 '''
3280 if not pycompat.iswindows:
3280 if not pycompat.iswindows:
3281 raise error.Abort(
3281 raise error.Abort(
3282 _(b'certificate chain building is only possible on Windows')
3282 _(b'certificate chain building is only possible on Windows')
3283 )
3283 )
3284
3284
3285 if not source:
3285 if not source:
3286 if not repo:
3286 if not repo:
3287 raise error.Abort(
3287 raise error.Abort(
3288 _(
3288 _(
3289 b"there is no Mercurial repository here, and no "
3289 b"there is no Mercurial repository here, and no "
3290 b"server specified"
3290 b"server specified"
3291 )
3291 )
3292 )
3292 )
3293 source = b"default"
3293 source = b"default"
3294
3294
3295 source, branches = hg.parseurl(ui.expandpath(source))
3295 source, branches = hg.parseurl(ui.expandpath(source))
3296 url = util.url(source)
3296 url = util.url(source)
3297
3297
3298 defaultport = {b'https': 443, b'ssh': 22}
3298 defaultport = {b'https': 443, b'ssh': 22}
3299 if url.scheme in defaultport:
3299 if url.scheme in defaultport:
3300 try:
3300 try:
3301 addr = (url.host, int(url.port or defaultport[url.scheme]))
3301 addr = (url.host, int(url.port or defaultport[url.scheme]))
3302 except ValueError:
3302 except ValueError:
3303 raise error.Abort(_(b"malformed port number in URL"))
3303 raise error.Abort(_(b"malformed port number in URL"))
3304 else:
3304 else:
3305 raise error.Abort(_(b"only https and ssh connections are supported"))
3305 raise error.Abort(_(b"only https and ssh connections are supported"))
3306
3306
3307 from . import win32
3307 from . import win32
3308
3308
3309 s = ssl.wrap_socket(
3309 s = ssl.wrap_socket(
3310 socket.socket(),
3310 socket.socket(),
3311 ssl_version=ssl.PROTOCOL_TLS,
3311 ssl_version=ssl.PROTOCOL_TLS,
3312 cert_reqs=ssl.CERT_NONE,
3312 cert_reqs=ssl.CERT_NONE,
3313 ca_certs=None,
3313 ca_certs=None,
3314 )
3314 )
3315
3315
3316 try:
3316 try:
3317 s.connect(addr)
3317 s.connect(addr)
3318 cert = s.getpeercert(True)
3318 cert = s.getpeercert(True)
3319
3319
3320 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3320 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3321
3321
3322 complete = win32.checkcertificatechain(cert, build=False)
3322 complete = win32.checkcertificatechain(cert, build=False)
3323
3323
3324 if not complete:
3324 if not complete:
3325 ui.status(_(b'certificate chain is incomplete, updating... '))
3325 ui.status(_(b'certificate chain is incomplete, updating... '))
3326
3326
3327 if not win32.checkcertificatechain(cert):
3327 if not win32.checkcertificatechain(cert):
3328 ui.status(_(b'failed.\n'))
3328 ui.status(_(b'failed.\n'))
3329 else:
3329 else:
3330 ui.status(_(b'done.\n'))
3330 ui.status(_(b'done.\n'))
3331 else:
3331 else:
3332 ui.status(_(b'full certificate chain is available\n'))
3332 ui.status(_(b'full certificate chain is available\n'))
3333 finally:
3333 finally:
3334 s.close()
3334 s.close()
3335
3335
3336
3336
3337 @command(
3337 @command(
3338 b'debugsub',
3338 b'debugsub',
3339 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3339 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3340 _(b'[-r REV] [REV]'),
3340 _(b'[-r REV] [REV]'),
3341 )
3341 )
3342 def debugsub(ui, repo, rev=None):
3342 def debugsub(ui, repo, rev=None):
3343 ctx = scmutil.revsingle(repo, rev, None)
3343 ctx = scmutil.revsingle(repo, rev, None)
3344 for k, v in sorted(ctx.substate.items()):
3344 for k, v in sorted(ctx.substate.items()):
3345 ui.writenoi18n(b'path %s\n' % k)
3345 ui.writenoi18n(b'path %s\n' % k)
3346 ui.writenoi18n(b' source %s\n' % v[0])
3346 ui.writenoi18n(b' source %s\n' % v[0])
3347 ui.writenoi18n(b' revision %s\n' % v[1])
3347 ui.writenoi18n(b' revision %s\n' % v[1])
3348
3348
3349
3349
3350 @command(
3350 @command(
3351 b'debugsuccessorssets',
3351 b'debugsuccessorssets',
3352 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3352 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3353 _(b'[REV]'),
3353 _(b'[REV]'),
3354 )
3354 )
3355 def debugsuccessorssets(ui, repo, *revs, **opts):
3355 def debugsuccessorssets(ui, repo, *revs, **opts):
3356 """show set of successors for revision
3356 """show set of successors for revision
3357
3357
3358 A successors set of changeset A is a consistent group of revisions that
3358 A successors set of changeset A is a consistent group of revisions that
3359 succeed A. It contains only non-obsolete changesets unless the closest
3359 succeed A. It contains only non-obsolete changesets unless the closest
3360 successors sets are requested (--closest).
3360 successors sets are requested (--closest).
3361
3361
3362 In most cases a changeset A has a single successors set containing a single
3362 In most cases a changeset A has a single successors set containing a single
3363 successor (changeset A replaced by A').
3363 successor (changeset A replaced by A').
3364
3364
3365 A changeset that is made obsolete with no successors is called "pruned".
3365 A changeset that is made obsolete with no successors is called "pruned".
3366 Such changesets have no successors sets at all.
3366 Such changesets have no successors sets at all.
3367
3367
3368 A changeset that has been "split" will have a successors set containing
3368 A changeset that has been "split" will have a successors set containing
3369 more than one successor.
3369 more than one successor.
3370
3370
3371 A changeset that has been rewritten in multiple different ways is called
3371 A changeset that has been rewritten in multiple different ways is called
3372 "divergent". Such changesets have multiple successor sets (each of which
3372 "divergent". Such changesets have multiple successor sets (each of which
3373 may also be split, i.e. have multiple successors).
3373 may also be split, i.e. have multiple successors).
3374
3374
3375 Results are displayed as follows::
3375 Results are displayed as follows::
3376
3376
3377 <rev1>
3377 <rev1>
3378 <successors-1A>
3378 <successors-1A>
3379 <rev2>
3379 <rev2>
3380 <successors-2A>
3380 <successors-2A>
3381 <successors-2B1> <successors-2B2> <successors-2B3>
3381 <successors-2B1> <successors-2B2> <successors-2B3>
3382
3382
3383 Here rev2 has two possible (i.e. divergent) successors sets. The first
3383 Here rev2 has two possible (i.e. divergent) successors sets. The first
3384 holds one element, whereas the second holds three (i.e. the changeset has
3384 holds one element, whereas the second holds three (i.e. the changeset has
3385 been split).
3385 been split).
3386 """
3386 """
3387 # passed to successorssets caching computation from one call to another
3387 # passed to successorssets caching computation from one call to another
3388 cache = {}
3388 cache = {}
3389 ctx2str = bytes
3389 ctx2str = bytes
3390 node2str = short
3390 node2str = short
3391 for rev in scmutil.revrange(repo, revs):
3391 for rev in scmutil.revrange(repo, revs):
3392 ctx = repo[rev]
3392 ctx = repo[rev]
3393 ui.write(b'%s\n' % ctx2str(ctx))
3393 ui.write(b'%s\n' % ctx2str(ctx))
3394 for succsset in obsutil.successorssets(
3394 for succsset in obsutil.successorssets(
3395 repo, ctx.node(), closest=opts['closest'], cache=cache
3395 repo, ctx.node(), closest=opts['closest'], cache=cache
3396 ):
3396 ):
3397 if succsset:
3397 if succsset:
3398 ui.write(b' ')
3398 ui.write(b' ')
3399 ui.write(node2str(succsset[0]))
3399 ui.write(node2str(succsset[0]))
3400 for node in succsset[1:]:
3400 for node in succsset[1:]:
3401 ui.write(b' ')
3401 ui.write(b' ')
3402 ui.write(node2str(node))
3402 ui.write(node2str(node))
3403 ui.write(b'\n')
3403 ui.write(b'\n')
3404
3404
3405
3405
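# Illustrative only: how the display format documented above maps to data.
# Each successors set becomes one indented, space-separated line of nodes
# (variable names are illustrative):
_succsets_demo = [[b'aaa111'], [b'bbb222', b'ccc333']]
_lines_demo = [b' ' + b' '.join(s) for s in _succsets_demo]
# -> [b' aaa111', b' bbb222 ccc333']
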
3406 @command(
3406 @command(
3407 b'debugtemplate',
3407 b'debugtemplate',
3408 [
3408 [
3409 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3409 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3410 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3410 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3411 ],
3411 ],
3412 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3412 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3413 optionalrepo=True,
3413 optionalrepo=True,
3414 )
3414 )
3415 def debugtemplate(ui, repo, tmpl, **opts):
3415 def debugtemplate(ui, repo, tmpl, **opts):
3416 """parse and apply a template
3416 """parse and apply a template
3417
3417
3418 If -r/--rev is given, the template is processed as a log template and
3418 If -r/--rev is given, the template is processed as a log template and
3419 applied to the given changesets. Otherwise, it is processed as a generic
3419 applied to the given changesets. Otherwise, it is processed as a generic
3420 template.
3420 template.
3421
3421
3422 Use --verbose to print the parsed tree.
3422 Use --verbose to print the parsed tree.
3423 """
3423 """
3424 revs = None
3424 revs = None
3425 if opts['rev']:
3425 if opts['rev']:
3426 if repo is None:
3426 if repo is None:
3427 raise error.RepoError(
3427 raise error.RepoError(
3428 _(b'there is no Mercurial repository here (.hg not found)')
3428 _(b'there is no Mercurial repository here (.hg not found)')
3429 )
3429 )
3430 revs = scmutil.revrange(repo, opts['rev'])
3430 revs = scmutil.revrange(repo, opts['rev'])
3431
3431
3432 props = {}
3432 props = {}
3433 for d in opts['define']:
3433 for d in opts['define']:
3434 try:
3434 try:
3435 k, v = (e.strip() for e in d.split(b'=', 1))
3435 k, v = (e.strip() for e in d.split(b'=', 1))
3436 if not k or k == b'ui':
3436 if not k or k == b'ui':
3437 raise ValueError
3437 raise ValueError
3438 props[k] = v
3438 props[k] = v
3439 except ValueError:
3439 except ValueError:
3440 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3440 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3441
3441
3442 if ui.verbose:
3442 if ui.verbose:
3443 aliases = ui.configitems(b'templatealias')
3443 aliases = ui.configitems(b'templatealias')
3444 tree = templater.parse(tmpl)
3444 tree = templater.parse(tmpl)
3445 ui.note(templater.prettyformat(tree), b'\n')
3445 ui.note(templater.prettyformat(tree), b'\n')
3446 newtree = templater.expandaliases(tree, aliases)
3446 newtree = templater.expandaliases(tree, aliases)
3447 if newtree != tree:
3447 if newtree != tree:
3448 ui.notenoi18n(
3448 ui.notenoi18n(
3449 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3449 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3450 )
3450 )
3451
3451
3452 if revs is None:
3452 if revs is None:
3453 tres = formatter.templateresources(ui, repo)
3453 tres = formatter.templateresources(ui, repo)
3454 t = formatter.maketemplater(ui, tmpl, resources=tres)
3454 t = formatter.maketemplater(ui, tmpl, resources=tres)
3455 if ui.verbose:
3455 if ui.verbose:
3456 kwds, funcs = t.symbolsuseddefault()
3456 kwds, funcs = t.symbolsuseddefault()
3457 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3457 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3458 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3458 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3459 ui.write(t.renderdefault(props))
3459 ui.write(t.renderdefault(props))
3460 else:
3460 else:
3461 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3461 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3462 if ui.verbose:
3462 if ui.verbose:
3463 kwds, funcs = displayer.t.symbolsuseddefault()
3463 kwds, funcs = displayer.t.symbolsuseddefault()
3464 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3464 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3465 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3465 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3466 for r in revs:
3466 for r in revs:
3467 displayer.show(repo[r], **pycompat.strkwargs(props))
3467 displayer.show(repo[r], **pycompat.strkwargs(props))
3468 displayer.close()
3468 displayer.close()
3469
3469
3470
3470
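# Sketch of the --define parsing used above: split on the first '=', strip
# both halves, and reject empty keys and the reserved 'ui' key (function name
# is illustrative):
def _parse_define_sketch(d):
    try:
        k, v = (e.strip() for e in d.split(b'=', 1))
        if not k or k == b'ui':
            raise ValueError
    except ValueError:
        raise ValueError(b'malformed keyword definition: %s' % d)
    return k, v
# _parse_define_sketch(b'author = alice') -> (b'author', b'alice')
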
3471 @command(
3471 @command(
3472 b'debuguigetpass',
3472 b'debuguigetpass',
3473 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3473 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3474 _(b'[-p TEXT]'),
3474 _(b'[-p TEXT]'),
3475 norepo=True,
3475 norepo=True,
3476 )
3476 )
3477 def debuguigetpass(ui, prompt=b''):
3477 def debuguigetpass(ui, prompt=b''):
3478 """show prompt to type password"""
3478 """show prompt to type password"""
3479 r = ui.getpass(prompt)
3479 r = ui.getpass(prompt)
3480 ui.writenoi18n(b'response: %s\n' % r)
3480 ui.writenoi18n(b'response: %s\n' % r)
3481
3481
3482
3482
3483 @command(
3483 @command(
3484 b'debuguiprompt',
3484 b'debuguiprompt',
3485 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3485 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3486 _(b'[-p TEXT]'),
3486 _(b'[-p TEXT]'),
3487 norepo=True,
3487 norepo=True,
3488 )
3488 )
3489 def debuguiprompt(ui, prompt=b''):
3489 def debuguiprompt(ui, prompt=b''):
3490 """show plain prompt"""
3490 """show plain prompt"""
3491 r = ui.prompt(prompt)
3491 r = ui.prompt(prompt)
3492 ui.writenoi18n(b'response: %s\n' % r)
3492 ui.writenoi18n(b'response: %s\n' % r)
3493
3493
3494
3494
3495 @command(b'debugupdatecaches', [])
3495 @command(b'debugupdatecaches', [])
3496 def debugupdatecaches(ui, repo, *pats, **opts):
3496 def debugupdatecaches(ui, repo, *pats, **opts):
3497 """warm all known caches in the repository"""
3497 """warm all known caches in the repository"""
3498 with repo.wlock(), repo.lock():
3498 with repo.wlock(), repo.lock():
3499 repo.updatecaches(full=True)
3499 repo.updatecaches(full=True)
3500
3500
3501
3501
3502 @command(
3502 @command(
3503 b'debugupgraderepo',
3503 b'debugupgraderepo',
3504 [
3504 [
3505 (
3505 (
3506 b'o',
3506 b'o',
3507 b'optimize',
3507 b'optimize',
3508 [],
3508 [],
3509 _(b'extra optimization to perform'),
3509 _(b'extra optimization to perform'),
3510 _(b'NAME'),
3510 _(b'NAME'),
3511 ),
3511 ),
3512 (b'', b'run', False, _(b'performs an upgrade')),
3512 (b'', b'run', False, _(b'performs an upgrade')),
3513 (b'', b'backup', True, _(b'keep the old repository content around')),
3513 (b'', b'backup', True, _(b'keep the old repository content around')),
3514 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3514 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3515 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3515 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3516 ],
3516 ],
3517 )
3517 )
3518 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3518 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3519 """upgrade a repository to use different features
3519 """upgrade a repository to use different features
3520
3520
3521 If no arguments are specified, the repository is evaluated for upgrade
3521 If no arguments are specified, the repository is evaluated for upgrade
3522 and a list of problems and potential optimizations is printed.
3522 and a list of problems and potential optimizations is printed.
3523
3523
3524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3525 can be influenced via additional arguments. More details will be provided
3525 can be influenced via additional arguments. More details will be provided
3526 by the command output when run without ``--run``.
3526 by the command output when run without ``--run``.
3527
3527
3528 During the upgrade, the repository will be locked and no writes will be
3528 During the upgrade, the repository will be locked and no writes will be
3529 allowed.
3529 allowed.
3530
3530
3531 At the end of the upgrade, the repository may not be readable while new
3531 At the end of the upgrade, the repository may not be readable while new
3532 repository data is swapped in. This window will be as long as it takes to
3532 repository data is swapped in. This window will be as long as it takes to
3533 rename some directories inside the ``.hg`` directory. On most machines, this
3533 rename some directories inside the ``.hg`` directory. On most machines, this
3534 should complete almost instantaneously and the chances of a consumer being
3534 should complete almost instantaneously and the chances of a consumer being
3535 unable to access the repository should be low.
3535 unable to access the repository should be low.
3536
3536
3537 By default, all revlogs will be upgraded. You can restrict this using
3537 By default, all revlogs will be upgraded. You can restrict this using
3538 flags such as `--manifest`:
3538 flags such as `--manifest`:
3539
3539
3540 * `--manifest`: only optimize the manifest
3540 * `--manifest`: only optimize the manifest
3541 * `--no-manifest`: optimize all revlogs but the manifest
3541 * `--no-manifest`: optimize all revlogs but the manifest
3542 * `--changelog`: optimize the changelog only
3542 * `--changelog`: optimize the changelog only
3543 * `--no-changelog --no-manifest`: optimize filelogs only
3543 * `--no-changelog --no-manifest`: optimize filelogs only
3544 """
3544 """
3545 return upgrade.upgraderepo(
3545 return upgrade.upgraderepo(
3546 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3546 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3547 )
3547 )
3548
3548
3549
3549
3550 @command(
3550 @command(
3551 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3551 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3552 )
3552 )
3553 def debugwalk(ui, repo, *pats, **opts):
3553 def debugwalk(ui, repo, *pats, **opts):
3554 """show how files match on given patterns"""
3554 """show how files match on given patterns"""
3555 opts = pycompat.byteskwargs(opts)
3555 opts = pycompat.byteskwargs(opts)
3556 m = scmutil.match(repo[None], pats, opts)
3556 m = scmutil.match(repo[None], pats, opts)
3557 if ui.verbose:
3557 if ui.verbose:
3558 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3558 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3559 items = list(repo[None].walk(m))
3559 items = list(repo[None].walk(m))
3560 if not items:
3560 if not items:
3561 return
3561 return
3562 f = lambda fn: fn
3562 f = lambda fn: fn
3563 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3563 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3564 f = lambda fn: util.normpath(fn)
3564 f = lambda fn: util.normpath(fn)
3565 fmt = b'f %%-%ds %%-%ds %%s' % (
3565 fmt = b'f %%-%ds %%-%ds %%s' % (
3566 max([len(abs) for abs in items]),
3566 max([len(abs) for abs in items]),
3567 max([len(repo.pathto(abs)) for abs in items]),
3567 max([len(repo.pathto(abs)) for abs in items]),
3568 )
3568 )
3569 for abs in items:
3569 for abs in items:
3570 line = fmt % (
3570 line = fmt % (
3571 abs,
3571 abs,
3572 f(repo.pathto(abs)),
3572 f(repo.pathto(abs)),
3573 m.exact(abs) and b'exact' or b'',
3573 m.exact(abs) and b'exact' or b'',
3574 )
3574 )
3575 ui.write(b"%s\n" % line.rstrip())
3575 ui.write(b"%s\n" % line.rstrip())
3576
3576
3577
3577
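# The column widths above are derived from the longest path; the same dynamic
# format-string construction, simplified to two columns with dummy paths
# (variable names are illustrative):
_paths_demo = [b'a.txt', b'dir/long-name.txt']
_fmt_demo = b'f %%-%ds %%s' % max(len(p) for p in _paths_demo)
_rows_demo = [_fmt_demo % (p, b'exact') for p in _paths_demo]
# each row pads the path to the same width so the final column lines up
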
3578 @command(b'debugwhyunstable', [], _(b'REV'))
3578 @command(b'debugwhyunstable', [], _(b'REV'))
3579 def debugwhyunstable(ui, repo, rev):
3579 def debugwhyunstable(ui, repo, rev):
3580 """explain instabilities of a changeset"""
3580 """explain instabilities of a changeset"""
3581 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3581 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3582 dnodes = b''
3582 dnodes = b''
3583 if entry.get(b'divergentnodes'):
3583 if entry.get(b'divergentnodes'):
3584 dnodes = (
3584 dnodes = (
3585 b' '.join(
3585 b' '.join(
3586 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3586 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3587 for ctx in entry[b'divergentnodes']
3587 for ctx in entry[b'divergentnodes']
3588 )
3588 )
3589 + b' '
3589 + b' '
3590 )
3590 )
3591 ui.write(
3591 ui.write(
3592 b'%s: %s%s %s\n'
3592 b'%s: %s%s %s\n'
3593 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3593 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3594 )
3594 )
3595
3595
3596
3596
3597 @command(
3597 @command(
3598 b'debugwireargs',
3598 b'debugwireargs',
3599 [
3599 [
3600 (b'', b'three', b'', b'three'),
3600 (b'', b'three', b'', b'three'),
3601 (b'', b'four', b'', b'four'),
3601 (b'', b'four', b'', b'four'),
3602 (b'', b'five', b'', b'five'),
3602 (b'', b'five', b'', b'five'),
3603 ]
3603 ]
3604 + cmdutil.remoteopts,
3604 + cmdutil.remoteopts,
3605 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3605 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3606 norepo=True,
3606 norepo=True,
3607 )
3607 )
3608 def debugwireargs(ui, repopath, *vals, **opts):
3608 def debugwireargs(ui, repopath, *vals, **opts):
3609 opts = pycompat.byteskwargs(opts)
3609 opts = pycompat.byteskwargs(opts)
3610 repo = hg.peer(ui, opts, repopath)
3610 repo = hg.peer(ui, opts, repopath)
3611 for opt in cmdutil.remoteopts:
3611 for opt in cmdutil.remoteopts:
3612 del opts[opt[1]]
3612 del opts[opt[1]]
3613 args = {}
3613 args = {}
3614 for k, v in pycompat.iteritems(opts):
3614 for k, v in pycompat.iteritems(opts):
3615 if v:
3615 if v:
3616 args[k] = v
3616 args[k] = v
3617 args = pycompat.strkwargs(args)
3617 args = pycompat.strkwargs(args)
3618 # run twice to check that we don't mess up the stream for the next command
3618 # run twice to check that we don't mess up the stream for the next command
3619 res1 = repo.debugwireargs(*vals, **args)
3619 res1 = repo.debugwireargs(*vals, **args)
3620 res2 = repo.debugwireargs(*vals, **args)
3620 res2 = repo.debugwireargs(*vals, **args)
3621 ui.write(b"%s\n" % res1)
3621 ui.write(b"%s\n" % res1)
3622 if res1 != res2:
3622 if res1 != res2:
3623 ui.warn(b"%s\n" % res2)
3623 ui.warn(b"%s\n" % res2)
3624
3624
3625
3625
3626 def _parsewirelangblocks(fh):
3626 def _parsewirelangblocks(fh):
3627 activeaction = None
3627 activeaction = None
3628 blocklines = []
3628 blocklines = []
3629 lastindent = 0
3629 lastindent = 0
3630
3630
3631 for line in fh:
3631 for line in fh:
3632 line = line.rstrip()
3632 line = line.rstrip()
3633 if not line:
3633 if not line:
3634 continue
3634 continue
3635
3635
3636 if line.startswith(b'#'):
3636 if line.startswith(b'#'):
3637 continue
3637 continue
3638
3638
3639 if not line.startswith(b' '):
3639 if not line.startswith(b' '):
3640 # New block. Flush previous one.
3640 # New block. Flush previous one.
3641 if activeaction:
3641 if activeaction:
3642 yield activeaction, blocklines
3642 yield activeaction, blocklines
3643
3643
3644 activeaction = line
3644 activeaction = line
3645 blocklines = []
3645 blocklines = []
3646 lastindent = 0
3646 lastindent = 0
3647 continue
3647 continue
3648
3648
3649 # Else we start with an indent.
3649 # Else we start with an indent.
3650
3650
3651 if not activeaction:
3651 if not activeaction:
3652 raise error.Abort(_(b'indented line outside of block'))
3652 raise error.Abort(_(b'indented line outside of block'))
3653
3653
3654 indent = len(line) - len(line.lstrip())
3654 indent = len(line) - len(line.lstrip())
3655
3655
3656 # If this line is indented more than the last line, concatenate it.
3656 # If this line is indented more than the last line, concatenate it.
3657 if indent > lastindent and blocklines:
3657 if indent > lastindent and blocklines:
3658 blocklines[-1] += line.lstrip()
3658 blocklines[-1] += line.lstrip()
3659 else:
3659 else:
3660 blocklines.append(line)
3660 blocklines.append(line)
3661 lastindent = indent
3661 lastindent = indent
3662
3662
3663 # Flush last block.
3663 # Flush last block.
3664 if activeaction:
3664 if activeaction:
3665 yield activeaction, blocklines
3665 yield activeaction, blocklines
3666
3666
3667
3667
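# Example of the block grammar parsed above: one unindented action line
# followed by indented argument lines, with comments and blank lines skipped
# (the variable name is illustrative):
import io

_example_blocks = io.BytesIO(
    b'# comments are ignored\n'
    b'command listkeys\n'
    b'    namespace bookmarks\n'
)
# list(_parsewirelangblocks(_example_blocks)) ->
#   [(b'command listkeys', [b'    namespace bookmarks'])]
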
3668 @command(
3668 @command(
3669 b'debugwireproto',
3669 b'debugwireproto',
3670 [
3670 [
3671 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3671 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3672 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3672 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3673 (
3673 (
3674 b'',
3674 b'',
3675 b'noreadstderr',
3675 b'noreadstderr',
3676 False,
3676 False,
3677 _(b'do not read from stderr of the remote'),
3677 _(b'do not read from stderr of the remote'),
3678 ),
3678 ),
3679 (
3679 (
3680 b'',
3680 b'',
3681 b'nologhandshake',
3681 b'nologhandshake',
3682 False,
3682 False,
3683 _(b'do not log I/O related to the peer handshake'),
3683 _(b'do not log I/O related to the peer handshake'),
3684 ),
3684 ),
3685 ]
3685 ]
3686 + cmdutil.remoteopts,
3686 + cmdutil.remoteopts,
3687 _(b'[PATH]'),
3687 _(b'[PATH]'),
3688 optionalrepo=True,
3688 optionalrepo=True,
3689 )
3689 )
3690 def debugwireproto(ui, repo, path=None, **opts):
3690 def debugwireproto(ui, repo, path=None, **opts):
3691 """send wire protocol commands to a server
3691 """send wire protocol commands to a server
3692
3692
3693 This command can be used to issue wire protocol commands to remote
3693 This command can be used to issue wire protocol commands to remote
3694 peers and to debug the raw data being exchanged.
3694 peers and to debug the raw data being exchanged.
3695
3695
3696 ``--localssh`` will start an SSH server against the current repository
3696 ``--localssh`` will start an SSH server against the current repository
3697 and connect to that. By default, the connection will perform a handshake
3697 and connect to that. By default, the connection will perform a handshake
3698 and establish an appropriate peer instance.
3698 and establish an appropriate peer instance.
3699
3699
3700 ``--peer`` can be used to bypass the handshake protocol and construct a
3700 ``--peer`` can be used to bypass the handshake protocol and construct a
3701 peer instance using the specified class type. Valid values are ``raw``,
3701 peer instance using the specified class type. Valid values are ``raw``,
3702 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3702 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3703 raw data payloads and don't support higher-level command actions.
3703 raw data payloads and don't support higher-level command actions.
3704
3704
3705 ``--noreadstderr`` can be used to disable automatic reading from stderr
3705 ``--noreadstderr`` can be used to disable automatic reading from stderr
3706 of the peer (for SSH connections only). Disabling automatic reading of
3706 of the peer (for SSH connections only). Disabling automatic reading of
3707 stderr is useful for making output more deterministic.
3707 stderr is useful for making output more deterministic.
3708
3708
3709 Commands are issued via a mini language which is specified via stdin.
3709 Commands are issued via a mini language which is specified via stdin.
3710 The language consists of individual actions to perform. An action is
3710 The language consists of individual actions to perform. An action is
3711 defined by a block. A block is defined as a line with no leading
3711 defined by a block. A block is defined as a line with no leading
3712 space followed by 0 or more lines with leading space. Blocks are
3712 space followed by 0 or more lines with leading space. Blocks are
3713 effectively a high-level command with additional metadata.
3713 effectively a high-level command with additional metadata.
3714
3714
3715 Lines beginning with ``#`` are ignored.
3715 Lines beginning with ``#`` are ignored.
3716
3716
3717 The following sections denote available actions.
3717 The following sections denote available actions.
3718
3718
3719 raw
3719 raw
3720 ---
3720 ---
3721
3721
3722 Send raw data to the server.
3722 Send raw data to the server.
3723
3723
3724 The block payload contains the raw data to send as one atomic send
3724 The block payload contains the raw data to send as one atomic send
3725 operation. The data may not actually be delivered in a single system
3725 operation. The data may not actually be delivered in a single system
3726 call: it depends on the abilities of the transport being used.
3726 call: it depends on the abilities of the transport being used.
3727
3727
3728 Each line in the block is de-indented and concatenated. Then, that
3728 Each line in the block is de-indented and concatenated. Then, that
3729 value is evaluated as a Python b'' literal. This allows the use of
3729 value is evaluated as a Python b'' literal. This allows the use of
3730 backslash escaping, etc.
3730 backslash escaping, etc.
3731
3731
3732 raw+
3732 raw+
3733 ----
3733 ----
3734
3734
3735 Behaves like ``raw`` except flushes output afterwards.
3735 Behaves like ``raw`` except flushes output afterwards.
3736
3736
3737 command <X>
3737 command <X>
3738 -----------
3738 -----------
3739
3739
3740 Send a request to run a named command, whose name follows the ``command``
3740 Send a request to run a named command, whose name follows the ``command``
3741 string.
3741 string.
3742
3742
3743 Arguments to the command are defined as lines in this block. The format of
3743 Arguments to the command are defined as lines in this block. The format of
3744 each line is ``<key> <value>``. e.g.::
3744 each line is ``<key> <value>``. e.g.::
3745
3745
3746 command listkeys
3746 command listkeys
3747 namespace bookmarks
3747 namespace bookmarks
3748
3748
3749 If the value begins with ``eval:``, it will be interpreted as a Python
3749 If the value begins with ``eval:``, it will be interpreted as a Python
3750 literal expression. Otherwise values are interpreted as Python b'' literals.
3750 literal expression. Otherwise values are interpreted as Python b'' literals.
3751 This allows sending complex types and encoding special byte sequences via
3751 This allows sending complex types and encoding special byte sequences via
3752 backslash escaping.
3752 backslash escaping.
3753
3753
3754 The following arguments have special meaning:
3754 The following arguments have special meaning:
3755
3755
3756 ``PUSHFILE``
3756 ``PUSHFILE``
3757 When defined, the *push* mechanism of the peer will be used instead
3757 When defined, the *push* mechanism of the peer will be used instead
3758 of the static request-response mechanism and the content of the
3758 of the static request-response mechanism and the content of the
3759 file specified in the value of this argument will be sent as the
3759 file specified in the value of this argument will be sent as the
3760 command payload.
3760 command payload.
3761
3761
3762 This can be used to submit a local bundle file to the remote.
3762 This can be used to submit a local bundle file to the remote.
3763
3763
3764 batchbegin
3764 batchbegin
3765 ----------
3765 ----------
3766
3766
3767 Instruct the peer to begin a batched send.
3767 Instruct the peer to begin a batched send.
3768
3768
3769 All ``command`` blocks are queued for execution until the next
3769 All ``command`` blocks are queued for execution until the next
3770 ``batchsubmit`` block.
3770 ``batchsubmit`` block.
3771
3771
3772 batchsubmit
3772 batchsubmit
3773 -----------
3773 -----------
3774
3774
3775 Submit previously queued ``command`` blocks as a batch request.
3775 Submit previously queued ``command`` blocks as a batch request.
3776
3776
3777 This action MUST be paired with a ``batchbegin`` action.
3777 This action MUST be paired with a ``batchbegin`` action.
3778
3778
3779 httprequest <method> <path>
3779 httprequest <method> <path>
3780 ---------------------------
3780 ---------------------------
3781
3781
3782 (HTTP peer only)
3782 (HTTP peer only)
3783
3783
3784 Send an HTTP request to the peer.
3784 Send an HTTP request to the peer.
3785
3785
3786 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3786 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3787
3787
3788 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3788 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3789 headers to add to the request. e.g. ``Accept: foo``.
3789 headers to add to the request. e.g. ``Accept: foo``.
3790
3790
3791 The following arguments are special:
3791 The following arguments are special:
3792
3792
3793 ``BODYFILE``
3793 ``BODYFILE``
3794 The content of the file defined as the value to this argument will be
3794 The content of the file defined as the value to this argument will be
3795 transferred verbatim as the HTTP request body.
3795 transferred verbatim as the HTTP request body.
3796
3796
3797 ``frame <type> <flags> <payload>``
3797 ``frame <type> <flags> <payload>``
3798 Send a unified protocol frame as part of the request body.
3798 Send a unified protocol frame as part of the request body.
3799
3799
3800 All frames will be collected and sent as the body to the HTTP
3800 All frames will be collected and sent as the body to the HTTP
3801 request.
3801 request.
3802
3802
3803 close
3803 close
3804 -----
3804 -----
3805
3805
3806 Close the connection to the server.
3806 Close the connection to the server.
3807
3807
3808 flush
3808 flush
3809 -----
3809 -----
3810
3810
3811 Flush data written to the server.
3811 Flush data written to the server.
3812
3812
3813 readavailable
3813 readavailable
3814 -------------
3814 -------------
3815
3815
3816 Close the write end of the connection and read all available data from
3816 Close the write end of the connection and read all available data from
3817 the server.
3817 the server.
3818
3818
3819 If the connection to the server encompasses multiple pipes, we poll both
3819 If the connection to the server encompasses multiple pipes, we poll both
3820 pipes and read available data.
3820 pipes and read available data.
3821
3821
3822 readline
3822 readline
3823 --------
3823 --------
3824
3824
3825 Read a line of output from the server. If there are multiple output
3825 Read a line of output from the server. If there are multiple output
3826 pipes, reads only the main pipe.
3826 pipes, reads only the main pipe.
3827
3827
3828 ereadline
3828 ereadline
3829 ---------
3829 ---------
3830
3830
3831 Like ``readline``, but read from the stderr pipe, if available.
3831 Like ``readline``, but read from the stderr pipe, if available.
3832
3832
3833 read <X>
3833 read <X>
3834 --------
3834 --------
3835
3835
3836 ``read()`` <X> bytes from the server's main output pipe.
3836 ``read()`` <X> bytes from the server's main output pipe.
3837
3837
3838 eread <X>
3838 eread <X>
3839 ---------
3839 ---------
3840
3840
3841 ``read()`` <X> bytes from the server's stderr pipe, if available.
3841 ``read()`` <X> bytes from the server's stderr pipe, if available.
3842
3842
3843 Specifying Unified Frame-Based Protocol Frames
3843 Specifying Unified Frame-Based Protocol Frames
3844 ----------------------------------------------
3844 ----------------------------------------------
3845
3845
3846 It is possible to emit *Unified Frame-Based Protocol* frames by using
3846 It is possible to emit *Unified Frame-Based Protocol* frames by using
3847 special syntax.
3847 special syntax.
3848
3848
3849 A frame is composed of a type, flags, and a payload. These can be parsed
3849 A frame is composed of a type, flags, and a payload. These can be parsed
3850 from a string of the form:
3850 from a string of the form:
3851
3851
3852 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3852 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3853
3853
3854 ``request-id`` and ``stream-id`` are integers defining the request and
3854 ``request-id`` and ``stream-id`` are integers defining the request and
3855 stream identifiers.
3855 stream identifiers.
3856
3856
3857 ``type`` can be an integer value for the frame type or the string name
3857 ``type`` can be an integer value for the frame type or the string name
3858 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3858 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3859 ``command-name``.
3859 ``command-name``.
3860
3860
3861 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3861 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3862 components. Each component (and there can be just one) can be an integer
3862 components. Each component (and there can be just one) can be an integer
3863 or a flag name for stream flags or frame flags, respectively. Values are
3863 or a flag name for stream flags or frame flags, respectively. Values are
3864 resolved to integers and then bitwise OR'd together.
3864 resolved to integers and then bitwise OR'd together.
3865
3865
3866 ``payload`` represents the raw frame payload. If it begins with
3866 ``payload`` represents the raw frame payload. If it begins with
3867 ``cbor:``, the following string is evaluated as Python code and the
3867 ``cbor:``, the following string is evaluated as Python code and the
3868 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3868 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3869 as a Python byte string literal.
3869 as a Python byte string literal.
3870 """
3870 """
3871 opts = pycompat.byteskwargs(opts)
3871 opts = pycompat.byteskwargs(opts)
3872
3872
3873 if opts[b'localssh'] and not repo:
3873 if opts[b'localssh'] and not repo:
3874 raise error.Abort(_(b'--localssh requires a repository'))
3874 raise error.Abort(_(b'--localssh requires a repository'))
3875
3875
    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
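            # As an illustrative sketch (not part of the original source), a
            # "command" block in the wire language looks like:
            #
            #   command listkeys
            #       namespace bookmarks
            #
            # Each indented "<key> <value>" line becomes an argument; values
            # prefixed with "eval:" are parsed as Python literals below.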
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
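            # Illustrative sketch (not from the original source) of an
            # "httprequest" block; header lines, BODYFILE, and "frame" lines
            # are the forms parsed below:
            #
            #   httprequest GET api/
            #       user-agent: test
            #       accept: application/mercurial-0.1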
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
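

# An end-to-end usage sketch (illustrative only; actual output depends on the
# server and repository): feed a wire language script to the command on stdin,
# for example
#
#   $ hg --verbose debugwireproto --localssh --peer ssh1 << EOF
#   > command heads
#   > EOF
#
# which spawns a local `hg debugserve --sshstdio` process, creates a version 1
# ssh peer against it, and prints the response to the `heads` command.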