debuginstall: gracefully handle missing __file__ attributes...
Matt Harbison
r44083:1fb19665 default
@@ -1,4266 +1,4278 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 changegroup,
41 changegroup,
42 cmdutil,
42 cmdutil,
43 color,
43 color,
44 context,
44 context,
45 copies,
45 copies,
46 dagparser,
46 dagparser,
47 encoding,
47 encoding,
48 error,
48 error,
49 exchange,
49 exchange,
50 extensions,
50 extensions,
51 filemerge,
51 filemerge,
52 filesetlang,
52 filesetlang,
53 formatter,
53 formatter,
54 hg,
54 hg,
55 httppeer,
55 httppeer,
56 localrepo,
56 localrepo,
57 lock as lockmod,
57 lock as lockmod,
58 logcmdutil,
58 logcmdutil,
59 merge as mergemod,
59 merge as mergemod,
60 obsolete,
60 obsolete,
61 obsutil,
61 obsutil,
62 pathutil,
62 pathutil,
63 phases,
63 phases,
64 policy,
64 policy,
65 pvec,
65 pvec,
66 pycompat,
66 pycompat,
67 registrar,
67 registrar,
68 repair,
68 repair,
69 revlog,
69 revlog,
70 revset,
70 revset,
71 revsetlang,
71 revsetlang,
72 scmutil,
72 scmutil,
73 setdiscovery,
73 setdiscovery,
74 simplemerge,
74 simplemerge,
75 sshpeer,
75 sshpeer,
76 sslutil,
76 sslutil,
77 streamclone,
77 streamclone,
78 templater,
78 templater,
79 treediscovery,
79 treediscovery,
80 upgrade,
80 upgrade,
81 url as urlmod,
81 url as urlmod,
82 util,
82 util,
83 vfs as vfsmod,
83 vfs as vfsmod,
84 wireprotoframing,
84 wireprotoframing,
85 wireprotoserver,
85 wireprotoserver,
86 wireprotov2peer,
86 wireprotov2peer,
87 )
87 )
88 from .utils import (
88 from .utils import (
89 cborutil,
89 cborutil,
90 compression,
90 compression,
91 dateutil,
91 dateutil,
92 procutil,
92 procutil,
93 stringutil,
93 stringutil,
94 )
94 )
95
95
96 from .revlogutils import deltas as deltautil
96 from .revlogutils import deltas as deltautil
97
97
98 release = lockmod.release
98 release = lockmod.release
99
99
100 command = registrar.command()
100 command = registrar.command()
101
101
102
102
103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
104 def debugancestor(ui, repo, *args):
104 def debugancestor(ui, repo, *args):
105 """find the ancestor revision of two revisions in a given index"""
105 """find the ancestor revision of two revisions in a given index"""
106 if len(args) == 3:
106 if len(args) == 3:
107 index, rev1, rev2 = args
107 index, rev1, rev2 = args
108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
109 lookup = r.lookup
109 lookup = r.lookup
110 elif len(args) == 2:
110 elif len(args) == 2:
111 if not repo:
111 if not repo:
112 raise error.Abort(
112 raise error.Abort(
113 _(b'there is no Mercurial repository here (.hg not found)')
113 _(b'there is no Mercurial repository here (.hg not found)')
114 )
114 )
115 rev1, rev2 = args
115 rev1, rev2 = args
116 r = repo.changelog
116 r = repo.changelog
117 lookup = repo.lookup
117 lookup = repo.lookup
118 else:
118 else:
119 raise error.Abort(_(b'either two or three arguments required'))
119 raise error.Abort(_(b'either two or three arguments required'))
120 a = r.ancestor(lookup(rev1), lookup(rev2))
120 a = r.ancestor(lookup(rev1), lookup(rev2))
121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
122
122
123
123
124 @command(b'debugapplystreamclonebundle', [], b'FILE')
124 @command(b'debugapplystreamclonebundle', [], b'FILE')
125 def debugapplystreamclonebundle(ui, repo, fname):
125 def debugapplystreamclonebundle(ui, repo, fname):
126 """apply a stream clone bundle file"""
126 """apply a stream clone bundle file"""
127 f = hg.openpath(ui, fname)
127 f = hg.openpath(ui, fname)
128 gen = exchange.readbundle(ui, f, fname)
128 gen = exchange.readbundle(ui, f, fname)
129 gen.apply(repo)
129 gen.apply(repo)
130
130
131
131
132 @command(
132 @command(
133 b'debugbuilddag',
133 b'debugbuilddag',
134 [
134 [
135 (
135 (
136 b'm',
136 b'm',
137 b'mergeable-file',
137 b'mergeable-file',
138 None,
138 None,
139 _(b'add single file mergeable changes'),
139 _(b'add single file mergeable changes'),
140 ),
140 ),
141 (
141 (
142 b'o',
142 b'o',
143 b'overwritten-file',
143 b'overwritten-file',
144 None,
144 None,
145 _(b'add single file all revs overwrite'),
145 _(b'add single file all revs overwrite'),
146 ),
146 ),
147 (b'n', b'new-file', None, _(b'add new file at each rev')),
147 (b'n', b'new-file', None, _(b'add new file at each rev')),
148 ],
148 ],
149 _(b'[OPTION]... [TEXT]'),
149 _(b'[OPTION]... [TEXT]'),
150 )
150 )
151 def debugbuilddag(
151 def debugbuilddag(
152 ui,
152 ui,
153 repo,
153 repo,
154 text=None,
154 text=None,
155 mergeable_file=False,
155 mergeable_file=False,
156 overwritten_file=False,
156 overwritten_file=False,
157 new_file=False,
157 new_file=False,
158 ):
158 ):
159 """builds a repo with a given DAG from scratch in the current empty repo
159 """builds a repo with a given DAG from scratch in the current empty repo
160
160
161 The description of the DAG is read from stdin if not given on the
161 The description of the DAG is read from stdin if not given on the
162 command line.
162 command line.
163
163
164 Elements:
164 Elements:
165
165
166 - "+n" is a linear run of n nodes based on the current default parent
166 - "+n" is a linear run of n nodes based on the current default parent
167 - "." is a single node based on the current default parent
167 - "." is a single node based on the current default parent
168 - "$" resets the default parent to null (implied at the start);
168 - "$" resets the default parent to null (implied at the start);
169 otherwise the default parent is always the last node created
169 otherwise the default parent is always the last node created
170 - "<p" sets the default parent to the backref p
170 - "<p" sets the default parent to the backref p
171 - "*p" is a fork at parent p, which is a backref
171 - "*p" is a fork at parent p, which is a backref
172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
173 - "/p2" is a merge of the preceding node and p2
173 - "/p2" is a merge of the preceding node and p2
174 - ":tag" defines a local tag for the preceding node
174 - ":tag" defines a local tag for the preceding node
175 - "@branch" sets the named branch for subsequent nodes
175 - "@branch" sets the named branch for subsequent nodes
176 - "#...\\n" is a comment up to the end of the line
176 - "#...\\n" is a comment up to the end of the line
177
177
178 Whitespace between the above elements is ignored.
178 Whitespace between the above elements is ignored.
179
179
180 A backref is either
180 A backref is either
181
181
182 - a number n, which references the node curr-n, where curr is the current
182 - a number n, which references the node curr-n, where curr is the current
183 node, or
183 node, or
184 - the name of a local tag you placed earlier using ":tag", or
184 - the name of a local tag you placed earlier using ":tag", or
185 - empty to denote the default parent.
185 - empty to denote the default parent.
186
186
187 All string valued-elements are either strictly alphanumeric, or must
187 All string valued-elements are either strictly alphanumeric, or must
188 be enclosed in double quotes ("..."), with "\\" as escape character.
188 be enclosed in double quotes ("..."), with "\\" as escape character.
189 """
189 """
190
190
191 if text is None:
191 if text is None:
192 ui.status(_(b"reading DAG from stdin\n"))
192 ui.status(_(b"reading DAG from stdin\n"))
193 text = ui.fin.read()
193 text = ui.fin.read()
194
194
195 cl = repo.changelog
195 cl = repo.changelog
196 if len(cl) > 0:
196 if len(cl) > 0:
197 raise error.Abort(_(b'repository is not empty'))
197 raise error.Abort(_(b'repository is not empty'))
198
198
199 # determine number of revs in DAG
199 # determine number of revs in DAG
200 total = 0
200 total = 0
201 for type, data in dagparser.parsedag(text):
201 for type, data in dagparser.parsedag(text):
202 if type == b'n':
202 if type == b'n':
203 total += 1
203 total += 1
204
204
205 if mergeable_file:
205 if mergeable_file:
206 linesperrev = 2
206 linesperrev = 2
207 # make a file with k lines per rev
207 # make a file with k lines per rev
208 initialmergedlines = [
208 initialmergedlines = [
209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
210 ]
210 ]
211 initialmergedlines.append(b"")
211 initialmergedlines.append(b"")
212
212
213 tags = []
213 tags = []
214 progress = ui.makeprogress(
214 progress = ui.makeprogress(
215 _(b'building'), unit=_(b'revisions'), total=total
215 _(b'building'), unit=_(b'revisions'), total=total
216 )
216 )
217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
218 at = -1
218 at = -1
219 atbranch = b'default'
219 atbranch = b'default'
220 nodeids = []
220 nodeids = []
221 id = 0
221 id = 0
222 progress.update(id)
222 progress.update(id)
223 for type, data in dagparser.parsedag(text):
223 for type, data in dagparser.parsedag(text):
224 if type == b'n':
224 if type == b'n':
225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
226 id, ps = data
226 id, ps = data
227
227
228 files = []
228 files = []
229 filecontent = {}
229 filecontent = {}
230
230
231 p2 = None
231 p2 = None
232 if mergeable_file:
232 if mergeable_file:
233 fn = b"mf"
233 fn = b"mf"
234 p1 = repo[ps[0]]
234 p1 = repo[ps[0]]
235 if len(ps) > 1:
235 if len(ps) > 1:
236 p2 = repo[ps[1]]
236 p2 = repo[ps[1]]
237 pa = p1.ancestor(p2)
237 pa = p1.ancestor(p2)
238 base, local, other = [
238 base, local, other = [
239 x[fn].data() for x in (pa, p1, p2)
239 x[fn].data() for x in (pa, p1, p2)
240 ]
240 ]
241 m3 = simplemerge.Merge3Text(base, local, other)
241 m3 = simplemerge.Merge3Text(base, local, other)
242 ml = [l.strip() for l in m3.merge_lines()]
242 ml = [l.strip() for l in m3.merge_lines()]
243 ml.append(b"")
243 ml.append(b"")
244 elif at > 0:
244 elif at > 0:
245 ml = p1[fn].data().split(b"\n")
245 ml = p1[fn].data().split(b"\n")
246 else:
246 else:
247 ml = initialmergedlines
247 ml = initialmergedlines
248 ml[id * linesperrev] += b" r%i" % id
248 ml[id * linesperrev] += b" r%i" % id
249 mergedtext = b"\n".join(ml)
249 mergedtext = b"\n".join(ml)
250 files.append(fn)
250 files.append(fn)
251 filecontent[fn] = mergedtext
251 filecontent[fn] = mergedtext
252
252
253 if overwritten_file:
253 if overwritten_file:
254 fn = b"of"
254 fn = b"of"
255 files.append(fn)
255 files.append(fn)
256 filecontent[fn] = b"r%i\n" % id
256 filecontent[fn] = b"r%i\n" % id
257
257
258 if new_file:
258 if new_file:
259 fn = b"nf%i" % id
259 fn = b"nf%i" % id
260 files.append(fn)
260 files.append(fn)
261 filecontent[fn] = b"r%i\n" % id
261 filecontent[fn] = b"r%i\n" % id
262 if len(ps) > 1:
262 if len(ps) > 1:
263 if not p2:
263 if not p2:
264 p2 = repo[ps[1]]
264 p2 = repo[ps[1]]
265 for fn in p2:
265 for fn in p2:
266 if fn.startswith(b"nf"):
266 if fn.startswith(b"nf"):
267 files.append(fn)
267 files.append(fn)
268 filecontent[fn] = p2[fn].data()
268 filecontent[fn] = p2[fn].data()
269
269
270 def fctxfn(repo, cx, path):
270 def fctxfn(repo, cx, path):
271 if path in filecontent:
271 if path in filecontent:
272 return context.memfilectx(
272 return context.memfilectx(
273 repo, cx, path, filecontent[path]
273 repo, cx, path, filecontent[path]
274 )
274 )
275 return None
275 return None
276
276
277 if len(ps) == 0 or ps[0] < 0:
277 if len(ps) == 0 or ps[0] < 0:
278 pars = [None, None]
278 pars = [None, None]
279 elif len(ps) == 1:
279 elif len(ps) == 1:
280 pars = [nodeids[ps[0]], None]
280 pars = [nodeids[ps[0]], None]
281 else:
281 else:
282 pars = [nodeids[p] for p in ps]
282 pars = [nodeids[p] for p in ps]
283 cx = context.memctx(
283 cx = context.memctx(
284 repo,
284 repo,
285 pars,
285 pars,
286 b"r%i" % id,
286 b"r%i" % id,
287 files,
287 files,
288 fctxfn,
288 fctxfn,
289 date=(id, 0),
289 date=(id, 0),
290 user=b"debugbuilddag",
290 user=b"debugbuilddag",
291 extra={b'branch': atbranch},
291 extra={b'branch': atbranch},
292 )
292 )
293 nodeid = repo.commitctx(cx)
293 nodeid = repo.commitctx(cx)
294 nodeids.append(nodeid)
294 nodeids.append(nodeid)
295 at = id
295 at = id
296 elif type == b'l':
296 elif type == b'l':
297 id, name = data
297 id, name = data
298 ui.note((b'tag %s\n' % name))
298 ui.note((b'tag %s\n' % name))
299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
300 elif type == b'a':
300 elif type == b'a':
301 ui.note((b'branch %s\n' % data))
301 ui.note((b'branch %s\n' % data))
302 atbranch = data
302 atbranch = data
303 progress.update(id)
303 progress.update(id)
304
304
305 if tags:
305 if tags:
306 repo.vfs.write(b"localtags", b"".join(tags))
306 repo.vfs.write(b"localtags", b"".join(tags))
307
307
308
308
309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
310 indent_string = b' ' * indent
310 indent_string = b' ' * indent
311 if all:
311 if all:
312 ui.writenoi18n(
312 ui.writenoi18n(
313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
314 % indent_string
314 % indent_string
315 )
315 )
316
316
317 def showchunks(named):
317 def showchunks(named):
318 ui.write(b"\n%s%s\n" % (indent_string, named))
318 ui.write(b"\n%s%s\n" % (indent_string, named))
319 for deltadata in gen.deltaiter():
319 for deltadata in gen.deltaiter():
320 node, p1, p2, cs, deltabase, delta, flags = deltadata
320 node, p1, p2, cs, deltabase, delta, flags = deltadata
321 ui.write(
321 ui.write(
322 b"%s%s %s %s %s %s %d\n"
322 b"%s%s %s %s %s %s %d\n"
323 % (
323 % (
324 indent_string,
324 indent_string,
325 hex(node),
325 hex(node),
326 hex(p1),
326 hex(p1),
327 hex(p2),
327 hex(p2),
328 hex(cs),
328 hex(cs),
329 hex(deltabase),
329 hex(deltabase),
330 len(delta),
330 len(delta),
331 )
331 )
332 )
332 )
333
333
334 chunkdata = gen.changelogheader()
334 chunkdata = gen.changelogheader()
335 showchunks(b"changelog")
335 showchunks(b"changelog")
336 chunkdata = gen.manifestheader()
336 chunkdata = gen.manifestheader()
337 showchunks(b"manifest")
337 showchunks(b"manifest")
338 for chunkdata in iter(gen.filelogheader, {}):
338 for chunkdata in iter(gen.filelogheader, {}):
339 fname = chunkdata[b'filename']
339 fname = chunkdata[b'filename']
340 showchunks(fname)
340 showchunks(fname)
341 else:
341 else:
342 if isinstance(gen, bundle2.unbundle20):
342 if isinstance(gen, bundle2.unbundle20):
343 raise error.Abort(_(b'use debugbundle2 for this file'))
343 raise error.Abort(_(b'use debugbundle2 for this file'))
344 chunkdata = gen.changelogheader()
344 chunkdata = gen.changelogheader()
345 for deltadata in gen.deltaiter():
345 for deltadata in gen.deltaiter():
346 node, p1, p2, cs, deltabase, delta, flags = deltadata
346 node, p1, p2, cs, deltabase, delta, flags = deltadata
347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
348
348
349
349
350 def _debugobsmarkers(ui, part, indent=0, **opts):
350 def _debugobsmarkers(ui, part, indent=0, **opts):
351 """display version and markers contained in 'data'"""
351 """display version and markers contained in 'data'"""
352 opts = pycompat.byteskwargs(opts)
352 opts = pycompat.byteskwargs(opts)
353 data = part.read()
353 data = part.read()
354 indent_string = b' ' * indent
354 indent_string = b' ' * indent
355 try:
355 try:
356 version, markers = obsolete._readmarkers(data)
356 version, markers = obsolete._readmarkers(data)
357 except error.UnknownVersion as exc:
357 except error.UnknownVersion as exc:
358 msg = b"%sunsupported version: %s (%d bytes)\n"
358 msg = b"%sunsupported version: %s (%d bytes)\n"
359 msg %= indent_string, exc.version, len(data)
359 msg %= indent_string, exc.version, len(data)
360 ui.write(msg)
360 ui.write(msg)
361 else:
361 else:
362 msg = b"%sversion: %d (%d bytes)\n"
362 msg = b"%sversion: %d (%d bytes)\n"
363 msg %= indent_string, version, len(data)
363 msg %= indent_string, version, len(data)
364 ui.write(msg)
364 ui.write(msg)
365 fm = ui.formatter(b'debugobsolete', opts)
365 fm = ui.formatter(b'debugobsolete', opts)
366 for rawmarker in sorted(markers):
366 for rawmarker in sorted(markers):
367 m = obsutil.marker(None, rawmarker)
367 m = obsutil.marker(None, rawmarker)
368 fm.startitem()
368 fm.startitem()
369 fm.plain(indent_string)
369 fm.plain(indent_string)
370 cmdutil.showmarker(fm, m)
370 cmdutil.showmarker(fm, m)
371 fm.end()
371 fm.end()
372
372
373
373
374 def _debugphaseheads(ui, data, indent=0):
374 def _debugphaseheads(ui, data, indent=0):
375 """display version and markers contained in 'data'"""
375 """display version and markers contained in 'data'"""
376 indent_string = b' ' * indent
376 indent_string = b' ' * indent
377 headsbyphase = phases.binarydecode(data)
377 headsbyphase = phases.binarydecode(data)
378 for phase in phases.allphases:
378 for phase in phases.allphases:
379 for head in headsbyphase[phase]:
379 for head in headsbyphase[phase]:
380 ui.write(indent_string)
380 ui.write(indent_string)
381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382
382
383
383
384 def _quasirepr(thing):
384 def _quasirepr(thing):
385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
386 return b'{%s}' % (
386 return b'{%s}' % (
387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
388 )
388 )
389 return pycompat.bytestr(repr(thing))
389 return pycompat.bytestr(repr(thing))
390
390
391
391
392 def _debugbundle2(ui, gen, all=None, **opts):
392 def _debugbundle2(ui, gen, all=None, **opts):
393 """lists the contents of a bundle2"""
393 """lists the contents of a bundle2"""
394 if not isinstance(gen, bundle2.unbundle20):
394 if not isinstance(gen, bundle2.unbundle20):
395 raise error.Abort(_(b'not a bundle2 file'))
395 raise error.Abort(_(b'not a bundle2 file'))
396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
397 parttypes = opts.get('part_type', [])
397 parttypes = opts.get('part_type', [])
398 for part in gen.iterparts():
398 for part in gen.iterparts():
399 if parttypes and part.type not in parttypes:
399 if parttypes and part.type not in parttypes:
400 continue
400 continue
401 msg = b'%s -- %s (mandatory: %r)\n'
401 msg = b'%s -- %s (mandatory: %r)\n'
402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
403 if part.type == b'changegroup':
403 if part.type == b'changegroup':
404 version = part.params.get(b'version', b'01')
404 version = part.params.get(b'version', b'01')
405 cg = changegroup.getunbundler(version, part, b'UN')
405 cg = changegroup.getunbundler(version, part, b'UN')
406 if not ui.quiet:
406 if not ui.quiet:
407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
408 if part.type == b'obsmarkers':
408 if part.type == b'obsmarkers':
409 if not ui.quiet:
409 if not ui.quiet:
410 _debugobsmarkers(ui, part, indent=4, **opts)
410 _debugobsmarkers(ui, part, indent=4, **opts)
411 if part.type == b'phase-heads':
411 if part.type == b'phase-heads':
412 if not ui.quiet:
412 if not ui.quiet:
413 _debugphaseheads(ui, part, indent=4)
413 _debugphaseheads(ui, part, indent=4)
414
414
415
415
416 @command(
416 @command(
417 b'debugbundle',
417 b'debugbundle',
418 [
418 [
419 (b'a', b'all', None, _(b'show all details')),
419 (b'a', b'all', None, _(b'show all details')),
420 (b'', b'part-type', [], _(b'show only the named part type')),
420 (b'', b'part-type', [], _(b'show only the named part type')),
421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
422 ],
422 ],
423 _(b'FILE'),
423 _(b'FILE'),
424 norepo=True,
424 norepo=True,
425 )
425 )
426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
427 """lists the contents of a bundle"""
427 """lists the contents of a bundle"""
428 with hg.openpath(ui, bundlepath) as f:
428 with hg.openpath(ui, bundlepath) as f:
429 if spec:
429 if spec:
430 spec = exchange.getbundlespec(ui, f)
430 spec = exchange.getbundlespec(ui, f)
431 ui.write(b'%s\n' % spec)
431 ui.write(b'%s\n' % spec)
432 return
432 return
433
433
434 gen = exchange.readbundle(ui, f, bundlepath)
434 gen = exchange.readbundle(ui, f, bundlepath)
435 if isinstance(gen, bundle2.unbundle20):
435 if isinstance(gen, bundle2.unbundle20):
436 return _debugbundle2(ui, gen, all=all, **opts)
436 return _debugbundle2(ui, gen, all=all, **opts)
437 _debugchangegroup(ui, gen, all=all, **opts)
437 _debugchangegroup(ui, gen, all=all, **opts)
438
438
439
439
440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
441 def debugcapabilities(ui, path, **opts):
441 def debugcapabilities(ui, path, **opts):
442 """lists the capabilities of a remote peer"""
442 """lists the capabilities of a remote peer"""
443 opts = pycompat.byteskwargs(opts)
443 opts = pycompat.byteskwargs(opts)
444 peer = hg.peer(ui, opts, path)
444 peer = hg.peer(ui, opts, path)
445 caps = peer.capabilities()
445 caps = peer.capabilities()
446 ui.writenoi18n(b'Main capabilities:\n')
446 ui.writenoi18n(b'Main capabilities:\n')
447 for c in sorted(caps):
447 for c in sorted(caps):
448 ui.write(b' %s\n' % c)
448 ui.write(b' %s\n' % c)
449 b2caps = bundle2.bundle2caps(peer)
449 b2caps = bundle2.bundle2caps(peer)
450 if b2caps:
450 if b2caps:
451 ui.writenoi18n(b'Bundle2 capabilities:\n')
451 ui.writenoi18n(b'Bundle2 capabilities:\n')
452 for key, values in sorted(pycompat.iteritems(b2caps)):
452 for key, values in sorted(pycompat.iteritems(b2caps)):
453 ui.write(b' %s\n' % key)
453 ui.write(b' %s\n' % key)
454 for v in values:
454 for v in values:
455 ui.write(b' %s\n' % v)
455 ui.write(b' %s\n' % v)
456
456
457
457
458 @command(b'debugcheckstate', [], b'')
458 @command(b'debugcheckstate', [], b'')
459 def debugcheckstate(ui, repo):
459 def debugcheckstate(ui, repo):
460 """validate the correctness of the current dirstate"""
460 """validate the correctness of the current dirstate"""
461 parent1, parent2 = repo.dirstate.parents()
461 parent1, parent2 = repo.dirstate.parents()
462 m1 = repo[parent1].manifest()
462 m1 = repo[parent1].manifest()
463 m2 = repo[parent2].manifest()
463 m2 = repo[parent2].manifest()
464 errors = 0
464 errors = 0
465 for f in repo.dirstate:
465 for f in repo.dirstate:
466 state = repo.dirstate[f]
466 state = repo.dirstate[f]
467 if state in b"nr" and f not in m1:
467 if state in b"nr" and f not in m1:
468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
469 errors += 1
469 errors += 1
470 if state in b"a" and f in m1:
470 if state in b"a" and f in m1:
471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
472 errors += 1
472 errors += 1
473 if state in b"m" and f not in m1 and f not in m2:
473 if state in b"m" and f not in m1 and f not in m2:
474 ui.warn(
474 ui.warn(
475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
476 )
476 )
477 errors += 1
477 errors += 1
478 for f in m1:
478 for f in m1:
479 state = repo.dirstate[f]
479 state = repo.dirstate[f]
480 if state not in b"nrm":
480 if state not in b"nrm":
481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
482 errors += 1
482 errors += 1
483 if errors:
483 if errors:
484 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
484 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
485 raise error.Abort(errstr)
485 raise error.Abort(errstr)
486
486
487
487
488 @command(
488 @command(
489 b'debugcolor',
489 b'debugcolor',
490 [(b'', b'style', None, _(b'show all configured styles'))],
490 [(b'', b'style', None, _(b'show all configured styles'))],
491 b'hg debugcolor',
491 b'hg debugcolor',
492 )
492 )
493 def debugcolor(ui, repo, **opts):
493 def debugcolor(ui, repo, **opts):
494 """show available color, effects or style"""
494 """show available color, effects or style"""
495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
496 if opts.get('style'):
496 if opts.get('style'):
497 return _debugdisplaystyle(ui)
497 return _debugdisplaystyle(ui)
498 else:
498 else:
499 return _debugdisplaycolor(ui)
499 return _debugdisplaycolor(ui)
500
500
501
501
502 def _debugdisplaycolor(ui):
502 def _debugdisplaycolor(ui):
503 ui = ui.copy()
503 ui = ui.copy()
504 ui._styles.clear()
504 ui._styles.clear()
505 for effect in color._activeeffects(ui).keys():
505 for effect in color._activeeffects(ui).keys():
506 ui._styles[effect] = effect
506 ui._styles[effect] = effect
507 if ui._terminfoparams:
507 if ui._terminfoparams:
508 for k, v in ui.configitems(b'color'):
508 for k, v in ui.configitems(b'color'):
509 if k.startswith(b'color.'):
509 if k.startswith(b'color.'):
510 ui._styles[k] = k[6:]
510 ui._styles[k] = k[6:]
511 elif k.startswith(b'terminfo.'):
511 elif k.startswith(b'terminfo.'):
512 ui._styles[k] = k[9:]
512 ui._styles[k] = k[9:]
513 ui.write(_(b'available colors:\n'))
513 ui.write(_(b'available colors:\n'))
514 # sort label with a '_' after the other to group '_background' entry.
514 # sort label with a '_' after the other to group '_background' entry.
515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
516 for colorname, label in items:
516 for colorname, label in items:
517 ui.write(b'%s\n' % colorname, label=label)
517 ui.write(b'%s\n' % colorname, label=label)
518
518
519
519
520 def _debugdisplaystyle(ui):
520 def _debugdisplaystyle(ui):
521 ui.write(_(b'available style:\n'))
521 ui.write(_(b'available style:\n'))
522 if not ui._styles:
522 if not ui._styles:
523 return
523 return
524 width = max(len(s) for s in ui._styles)
524 width = max(len(s) for s in ui._styles)
525 for label, effects in sorted(ui._styles.items()):
525 for label, effects in sorted(ui._styles.items()):
526 ui.write(b'%s' % label, label=label)
526 ui.write(b'%s' % label, label=label)
527 if effects:
527 if effects:
528 # 50
528 # 50
529 ui.write(b': ')
529 ui.write(b': ')
530 ui.write(b' ' * (max(0, width - len(label))))
530 ui.write(b' ' * (max(0, width - len(label))))
531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
532 ui.write(b'\n')
532 ui.write(b'\n')
533
533
534
534
535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
536 def debugcreatestreamclonebundle(ui, repo, fname):
536 def debugcreatestreamclonebundle(ui, repo, fname):
537 """create a stream clone bundle file
537 """create a stream clone bundle file
538
538
539 Stream bundles are special bundles that are essentially archives of
539 Stream bundles are special bundles that are essentially archives of
540 revlog files. They are commonly used for cloning very quickly.
540 revlog files. They are commonly used for cloning very quickly.
541 """
541 """
542 # TODO we may want to turn this into an abort when this functionality
542 # TODO we may want to turn this into an abort when this functionality
543 # is moved into `hg bundle`.
543 # is moved into `hg bundle`.
544 if phases.hassecret(repo):
544 if phases.hassecret(repo):
545 ui.warn(
545 ui.warn(
546 _(
546 _(
547 b'(warning: stream clone bundle will contain secret '
547 b'(warning: stream clone bundle will contain secret '
548 b'revisions)\n'
548 b'revisions)\n'
549 )
549 )
550 )
550 )
551
551
552 requirements, gen = streamclone.generatebundlev1(repo)
552 requirements, gen = streamclone.generatebundlev1(repo)
553 changegroup.writechunks(ui, gen, fname)
553 changegroup.writechunks(ui, gen, fname)
554
554
555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
556
556
557
557
558 @command(
558 @command(
559 b'debugdag',
559 b'debugdag',
560 [
560 [
561 (b't', b'tags', None, _(b'use tags as labels')),
561 (b't', b'tags', None, _(b'use tags as labels')),
562 (b'b', b'branches', None, _(b'annotate with branch names')),
562 (b'b', b'branches', None, _(b'annotate with branch names')),
563 (b'', b'dots', None, _(b'use dots for runs')),
563 (b'', b'dots', None, _(b'use dots for runs')),
564 (b's', b'spaces', None, _(b'separate elements by spaces')),
564 (b's', b'spaces', None, _(b'separate elements by spaces')),
565 ],
565 ],
566 _(b'[OPTION]... [FILE [REV]...]'),
566 _(b'[OPTION]... [FILE [REV]...]'),
567 optionalrepo=True,
567 optionalrepo=True,
568 )
568 )
569 def debugdag(ui, repo, file_=None, *revs, **opts):
569 def debugdag(ui, repo, file_=None, *revs, **opts):
570 """format the changelog or an index DAG as a concise textual description
570 """format the changelog or an index DAG as a concise textual description
571
571
572 If you pass a revlog index, the revlog's DAG is emitted. If you list
572 If you pass a revlog index, the revlog's DAG is emitted. If you list
573 revision numbers, they get labeled in the output as rN.
573 revision numbers, they get labeled in the output as rN.
574
574
575 Otherwise, the changelog DAG of the current repo is emitted.
575 Otherwise, the changelog DAG of the current repo is emitted.
576 """
576 """
577 spaces = opts.get('spaces')
577 spaces = opts.get('spaces')
578 dots = opts.get('dots')
578 dots = opts.get('dots')
579 if file_:
579 if file_:
580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
581 revs = set((int(r) for r in revs))
581 revs = set((int(r) for r in revs))
582
582
583 def events():
583 def events():
584 for r in rlog:
584 for r in rlog:
585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
586 if r in revs:
586 if r in revs:
587 yield b'l', (r, b"r%i" % r)
587 yield b'l', (r, b"r%i" % r)
588
588
589 elif repo:
589 elif repo:
590 cl = repo.changelog
590 cl = repo.changelog
591 tags = opts.get('tags')
591 tags = opts.get('tags')
592 branches = opts.get('branches')
592 branches = opts.get('branches')
593 if tags:
593 if tags:
594 labels = {}
594 labels = {}
595 for l, n in repo.tags().items():
595 for l, n in repo.tags().items():
596 labels.setdefault(cl.rev(n), []).append(l)
596 labels.setdefault(cl.rev(n), []).append(l)
597
597
598 def events():
598 def events():
599 b = b"default"
599 b = b"default"
600 for r in cl:
600 for r in cl:
601 if branches:
601 if branches:
602 newb = cl.read(cl.node(r))[5][b'branch']
602 newb = cl.read(cl.node(r))[5][b'branch']
603 if newb != b:
603 if newb != b:
604 yield b'a', newb
604 yield b'a', newb
605 b = newb
605 b = newb
606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
607 if tags:
607 if tags:
608 ls = labels.get(r)
608 ls = labels.get(r)
609 if ls:
609 if ls:
610 for l in ls:
610 for l in ls:
611 yield b'l', (r, l)
611 yield b'l', (r, l)
612
612
613 else:
613 else:
614 raise error.Abort(_(b'need repo for changelog dag'))
614 raise error.Abort(_(b'need repo for changelog dag'))
615
615
616 for line in dagparser.dagtextlines(
616 for line in dagparser.dagtextlines(
617 events(),
617 events(),
618 addspaces=spaces,
618 addspaces=spaces,
619 wraplabels=True,
619 wraplabels=True,
620 wrapannotations=True,
620 wrapannotations=True,
621 wrapnonlinear=dots,
621 wrapnonlinear=dots,
622 usedots=dots,
622 usedots=dots,
623 maxlinewidth=70,
623 maxlinewidth=70,
624 ):
624 ):
625 ui.write(line)
625 ui.write(line)
626 ui.write(b"\n")
626 ui.write(b"\n")
627
627
628
628
629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
630 def debugdata(ui, repo, file_, rev=None, **opts):
630 def debugdata(ui, repo, file_, rev=None, **opts):
631 """dump the contents of a data file revision"""
631 """dump the contents of a data file revision"""
632 opts = pycompat.byteskwargs(opts)
632 opts = pycompat.byteskwargs(opts)
633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
634 if rev is not None:
634 if rev is not None:
635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
636 file_, rev = None, file_
636 file_, rev = None, file_
637 elif rev is None:
637 elif rev is None:
638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
640 try:
640 try:
641 ui.write(r.rawdata(r.lookup(rev)))
641 ui.write(r.rawdata(r.lookup(rev)))
642 except KeyError:
642 except KeyError:
643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
644
644
645
645
646 @command(
646 @command(
647 b'debugdate',
647 b'debugdate',
648 [(b'e', b'extended', None, _(b'try extended date formats'))],
648 [(b'e', b'extended', None, _(b'try extended date formats'))],
649 _(b'[-e] DATE [RANGE]'),
649 _(b'[-e] DATE [RANGE]'),
650 norepo=True,
650 norepo=True,
651 optionalrepo=True,
651 optionalrepo=True,
652 )
652 )
653 def debugdate(ui, date, range=None, **opts):
653 def debugdate(ui, date, range=None, **opts):
654 """parse and display a date"""
654 """parse and display a date"""
655 if opts["extended"]:
655 if opts["extended"]:
656 d = dateutil.parsedate(date, util.extendeddateformats)
656 d = dateutil.parsedate(date, util.extendeddateformats)
657 else:
657 else:
658 d = dateutil.parsedate(date)
658 d = dateutil.parsedate(date)
659 ui.writenoi18n(b"internal: %d %d\n" % d)
659 ui.writenoi18n(b"internal: %d %d\n" % d)
660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
661 if range:
661 if range:
662 m = dateutil.matchdate(range)
662 m = dateutil.matchdate(range)
663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
664
664
665
665
666 @command(
666 @command(
667 b'debugdeltachain',
667 b'debugdeltachain',
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 _(b'-c|-m|FILE'),
669 _(b'-c|-m|FILE'),
670 optionalrepo=True,
670 optionalrepo=True,
671 )
671 )
672 def debugdeltachain(ui, repo, file_=None, **opts):
672 def debugdeltachain(ui, repo, file_=None, **opts):
673 """dump information about delta chains in a revlog
673 """dump information about delta chains in a revlog
674
674
675 Output can be templatized. Available template keywords are:
675 Output can be templatized. Available template keywords are:
676
676
677 :``rev``: revision number
677 :``rev``: revision number
678 :``chainid``: delta chain identifier (numbered by unique base)
678 :``chainid``: delta chain identifier (numbered by unique base)
679 :``chainlen``: delta chain length to this revision
679 :``chainlen``: delta chain length to this revision
680 :``prevrev``: previous revision in delta chain
680 :``prevrev``: previous revision in delta chain
681 :``deltatype``: role of delta / how it was computed
681 :``deltatype``: role of delta / how it was computed
682 :``compsize``: compressed size of revision
682 :``compsize``: compressed size of revision
683 :``uncompsize``: uncompressed size of revision
683 :``uncompsize``: uncompressed size of revision
684 :``chainsize``: total size of compressed revisions in chain
684 :``chainsize``: total size of compressed revisions in chain
685 :``chainratio``: total chain size divided by uncompressed revision size
685 :``chainratio``: total chain size divided by uncompressed revision size
686 (new delta chains typically start at ratio 2.00)
686 (new delta chains typically start at ratio 2.00)
687 :``lindist``: linear distance from base revision in delta chain to end
687 :``lindist``: linear distance from base revision in delta chain to end
688 of this revision
688 of this revision
689 :``extradist``: total size of revisions not part of this delta chain from
689 :``extradist``: total size of revisions not part of this delta chain from
690 base of delta chain to end of this revision; a measurement
690 base of delta chain to end of this revision; a measurement
691 of how much extra data we need to read/seek across to read
691 of how much extra data we need to read/seek across to read
692 the delta chain for this revision
692 the delta chain for this revision
693 :``extraratio``: extradist divided by chainsize; another representation of
693 :``extraratio``: extradist divided by chainsize; another representation of
694 how much unrelated data is needed to load this delta chain
694 how much unrelated data is needed to load this delta chain
695
695
696 If the repository is configured to use the sparse read, additional keywords
696 If the repository is configured to use the sparse read, additional keywords
697 are available:
697 are available:
698
698
699 :``readsize``: total size of data read from the disk for a revision
699 :``readsize``: total size of data read from the disk for a revision
700 (sum of the sizes of all the blocks)
700 (sum of the sizes of all the blocks)
701 :``largestblock``: size of the largest block of data read from the disk
701 :``largestblock``: size of the largest block of data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
703 :``srchunks``: in how many data hunks the whole revision would be read
703 :``srchunks``: in how many data hunks the whole revision would be read
704
704
705 The sparse read can be enabled with experimental.sparse-read = True
705 The sparse read can be enabled with experimental.sparse-read = True
706 """
706 """
707 opts = pycompat.byteskwargs(opts)
707 opts = pycompat.byteskwargs(opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 index = r.index
709 index = r.index
710 start = r.start
710 start = r.start
711 length = r.length
711 length = r.length
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 withsparseread = getattr(r, '_withsparseread', False)
713 withsparseread = getattr(r, '_withsparseread', False)
714
714
715 def revinfo(rev):
715 def revinfo(rev):
716 e = index[rev]
716 e = index[rev]
717 compsize = e[1]
717 compsize = e[1]
718 uncompsize = e[2]
718 uncompsize = e[2]
719 chainsize = 0
719 chainsize = 0
720
720
721 if generaldelta:
721 if generaldelta:
722 if e[3] == e[5]:
722 if e[3] == e[5]:
723 deltatype = b'p1'
723 deltatype = b'p1'
724 elif e[3] == e[6]:
724 elif e[3] == e[6]:
725 deltatype = b'p2'
725 deltatype = b'p2'
726 elif e[3] == rev - 1:
726 elif e[3] == rev - 1:
727 deltatype = b'prev'
727 deltatype = b'prev'
728 elif e[3] == rev:
728 elif e[3] == rev:
729 deltatype = b'base'
729 deltatype = b'base'
730 else:
730 else:
731 deltatype = b'other'
731 deltatype = b'other'
732 else:
732 else:
733 if e[3] == rev:
733 if e[3] == rev:
734 deltatype = b'base'
734 deltatype = b'base'
735 else:
735 else:
736 deltatype = b'prev'
736 deltatype = b'prev'
737
737
738 chain = r._deltachain(rev)[0]
738 chain = r._deltachain(rev)[0]
739 for iterrev in chain:
739 for iterrev in chain:
740 e = index[iterrev]
740 e = index[iterrev]
741 chainsize += e[1]
741 chainsize += e[1]
742
742
743 return compsize, uncompsize, deltatype, chain, chainsize
743 return compsize, uncompsize, deltatype, chain, chainsize
744
744
745 fm = ui.formatter(b'debugdeltachain', opts)
745 fm = ui.formatter(b'debugdeltachain', opts)
746
746
747 fm.plain(
747 fm.plain(
748 b' rev chain# chainlen prev delta '
748 b' rev chain# chainlen prev delta '
749 b'size rawsize chainsize ratio lindist extradist '
749 b'size rawsize chainsize ratio lindist extradist '
750 b'extraratio'
750 b'extraratio'
751 )
751 )
752 if withsparseread:
752 if withsparseread:
753 fm.plain(b' readsize largestblk rddensity srchunks')
753 fm.plain(b' readsize largestblk rddensity srchunks')
754 fm.plain(b'\n')
754 fm.plain(b'\n')
755
755
756 chainbases = {}
756 chainbases = {}
757 for rev in r:
757 for rev in r:
758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
759 chainbase = chain[0]
759 chainbase = chain[0]
760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
761 basestart = start(chainbase)
761 basestart = start(chainbase)
762 revstart = start(rev)
762 revstart = start(rev)
763 lineardist = revstart + comp - basestart
763 lineardist = revstart + comp - basestart
764 extradist = lineardist - chainsize
764 extradist = lineardist - chainsize
765 try:
765 try:
766 prevrev = chain[-2]
766 prevrev = chain[-2]
767 except IndexError:
767 except IndexError:
768 prevrev = -1
768 prevrev = -1
769
769
770 if uncomp != 0:
770 if uncomp != 0:
771 chainratio = float(chainsize) / float(uncomp)
771 chainratio = float(chainsize) / float(uncomp)
772 else:
772 else:
773 chainratio = chainsize
773 chainratio = chainsize
774
774
775 if chainsize != 0:
775 if chainsize != 0:
776 extraratio = float(extradist) / float(chainsize)
776 extraratio = float(extradist) / float(chainsize)
777 else:
777 else:
778 extraratio = extradist
778 extraratio = extradist
779
779
780 fm.startitem()
780 fm.startitem()
781 fm.write(
781 fm.write(
782 b'rev chainid chainlen prevrev deltatype compsize '
782 b'rev chainid chainlen prevrev deltatype compsize '
783 b'uncompsize chainsize chainratio lindist extradist '
783 b'uncompsize chainsize chainratio lindist extradist '
784 b'extraratio',
784 b'extraratio',
785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
786 rev,
786 rev,
787 chainid,
787 chainid,
788 len(chain),
788 len(chain),
789 prevrev,
789 prevrev,
790 deltatype,
790 deltatype,
791 comp,
791 comp,
792 uncomp,
792 uncomp,
793 chainsize,
793 chainsize,
794 chainratio,
794 chainratio,
795 lineardist,
795 lineardist,
796 extradist,
796 extradist,
797 extraratio,
797 extraratio,
798 rev=rev,
798 rev=rev,
799 chainid=chainid,
799 chainid=chainid,
800 chainlen=len(chain),
800 chainlen=len(chain),
801 prevrev=prevrev,
801 prevrev=prevrev,
802 deltatype=deltatype,
802 deltatype=deltatype,
803 compsize=comp,
803 compsize=comp,
804 uncompsize=uncomp,
804 uncompsize=uncomp,
805 chainsize=chainsize,
805 chainsize=chainsize,
806 chainratio=chainratio,
806 chainratio=chainratio,
807 lindist=lineardist,
807 lindist=lineardist,
808 extradist=extradist,
808 extradist=extradist,
809 extraratio=extraratio,
809 extraratio=extraratio,
810 )
810 )
811 if withsparseread:
811 if withsparseread:
812 readsize = 0
812 readsize = 0
813 largestblock = 0
813 largestblock = 0
814 srchunks = 0
814 srchunks = 0
815
815
816 for revschunk in deltautil.slicechunk(r, chain):
816 for revschunk in deltautil.slicechunk(r, chain):
817 srchunks += 1
817 srchunks += 1
818 blkend = start(revschunk[-1]) + length(revschunk[-1])
818 blkend = start(revschunk[-1]) + length(revschunk[-1])
819 blksize = blkend - start(revschunk[0])
819 blksize = blkend - start(revschunk[0])
820
820
821 readsize += blksize
821 readsize += blksize
822 if largestblock < blksize:
822 if largestblock < blksize:
823 largestblock = blksize
823 largestblock = blksize
824
824
825 if readsize:
825 if readsize:
826 readdensity = float(chainsize) / float(readsize)
826 readdensity = float(chainsize) / float(readsize)
827 else:
827 else:
828 readdensity = 1
828 readdensity = 1
829
829
830 fm.write(
830 fm.write(
831 b'readsize largestblock readdensity srchunks',
831 b'readsize largestblock readdensity srchunks',
832 b' %10d %10d %9.5f %8d',
832 b' %10d %10d %9.5f %8d',
833 readsize,
833 readsize,
834 largestblock,
834 largestblock,
835 readdensity,
835 readdensity,
836 srchunks,
836 srchunks,
837 readsize=readsize,
837 readsize=readsize,
838 largestblock=largestblock,
838 largestblock=largestblock,
839 readdensity=readdensity,
839 readdensity=readdensity,
840 srchunks=srchunks,
840 srchunks=srchunks,
841 )
841 )
842
842
843 fm.plain(b'\n')
843 fm.plain(b'\n')
844
844
845 fm.end()
845 fm.end()
846
846
847
847
848 @command(
848 @command(
849 b'debugdirstate|debugstate',
849 b'debugdirstate|debugstate',
850 [
850 [
851 (
851 (
852 b'',
852 b'',
853 b'nodates',
853 b'nodates',
854 None,
854 None,
855 _(b'do not display the saved mtime (DEPRECATED)'),
855 _(b'do not display the saved mtime (DEPRECATED)'),
856 ),
856 ),
857 (b'', b'dates', True, _(b'display the saved mtime')),
857 (b'', b'dates', True, _(b'display the saved mtime')),
858 (b'', b'datesort', None, _(b'sort by saved mtime')),
858 (b'', b'datesort', None, _(b'sort by saved mtime')),
859 ],
859 ],
860 _(b'[OPTION]...'),
860 _(b'[OPTION]...'),
861 )
861 )
862 def debugstate(ui, repo, **opts):
862 def debugstate(ui, repo, **opts):
863 """show the contents of the current dirstate"""
863 """show the contents of the current dirstate"""
864
864
865 nodates = not opts['dates']
865 nodates = not opts['dates']
866 if opts.get('nodates') is not None:
866 if opts.get('nodates') is not None:
867 nodates = True
867 nodates = True
868 datesort = opts.get('datesort')
868 datesort = opts.get('datesort')
869
869
870 if datesort:
870 if datesort:
871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
872 else:
872 else:
873 keyfunc = None # sort by filename
873 keyfunc = None # sort by filename
874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
875 if ent[3] == -1:
875 if ent[3] == -1:
876 timestr = b'unset '
876 timestr = b'unset '
877 elif nodates:
877 elif nodates:
878 timestr = b'set '
878 timestr = b'set '
879 else:
879 else:
880 timestr = time.strftime(
880 timestr = time.strftime(
881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
882 )
882 )
883 timestr = encoding.strtolocal(timestr)
883 timestr = encoding.strtolocal(timestr)
884 if ent[1] & 0o20000:
884 if ent[1] & 0o20000:
885 mode = b'lnk'
885 mode = b'lnk'
886 else:
886 else:
887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 for f in repo.dirstate.copies():
889 for f in repo.dirstate.copies():
890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891
891
892
892
893 @command(
893 @command(
894 b'debugdiscovery',
894 b'debugdiscovery',
895 [
895 [
896 (b'', b'old', None, _(b'use old-style discovery')),
896 (b'', b'old', None, _(b'use old-style discovery')),
897 (
897 (
898 b'',
898 b'',
899 b'nonheads',
899 b'nonheads',
900 None,
900 None,
901 _(b'use old-style discovery with non-heads included'),
901 _(b'use old-style discovery with non-heads included'),
902 ),
902 ),
903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
904 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
904 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
905 ]
905 ]
906 + cmdutil.remoteopts,
906 + cmdutil.remoteopts,
907 _(b'[--rev REV] [OTHER]'),
907 _(b'[--rev REV] [OTHER]'),
908 )
908 )
909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
910 """runs the changeset discovery protocol in isolation"""
910 """runs the changeset discovery protocol in isolation"""
911 opts = pycompat.byteskwargs(opts)
911 opts = pycompat.byteskwargs(opts)
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 remote = hg.peer(repo, opts, remoteurl)
913 remote = hg.peer(repo, opts, remoteurl)
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915
915
916 # make sure tests are repeatable
916 # make sure tests are repeatable
917 random.seed(int(opts[b'seed']))
917 random.seed(int(opts[b'seed']))
918
918
919 if opts.get(b'old'):
919 if opts.get(b'old'):
920
920
921 def doit(pushedrevs, remoteheads, remote=remote):
921 def doit(pushedrevs, remoteheads, remote=remote):
922 if not util.safehasattr(remote, b'branches'):
922 if not util.safehasattr(remote, b'branches'):
923 # enable in-client legacy support
923 # enable in-client legacy support
924 remote = localrepo.locallegacypeer(remote.local())
924 remote = localrepo.locallegacypeer(remote.local())
925 common, _in, hds = treediscovery.findcommonincoming(
925 common, _in, hds = treediscovery.findcommonincoming(
926 repo, remote, force=True
926 repo, remote, force=True
927 )
927 )
928 common = set(common)
928 common = set(common)
929 if not opts.get(b'nonheads'):
929 if not opts.get(b'nonheads'):
930 ui.writenoi18n(
930 ui.writenoi18n(
931 b"unpruned common: %s\n"
931 b"unpruned common: %s\n"
932 % b" ".join(sorted(short(n) for n in common))
932 % b" ".join(sorted(short(n) for n in common))
933 )
933 )
934
934
935 clnode = repo.changelog.node
935 clnode = repo.changelog.node
936 common = repo.revs(b'heads(::%ln)', common)
936 common = repo.revs(b'heads(::%ln)', common)
937 common = {clnode(r) for r in common}
937 common = {clnode(r) for r in common}
938 return common, hds
938 return common, hds
939
939
940 else:
940 else:
941
941
942 def doit(pushedrevs, remoteheads, remote=remote):
942 def doit(pushedrevs, remoteheads, remote=remote):
943 nodes = None
943 nodes = None
944 if pushedrevs:
944 if pushedrevs:
945 revs = scmutil.revrange(repo, pushedrevs)
945 revs = scmutil.revrange(repo, pushedrevs)
946 nodes = [repo[r].node() for r in revs]
946 nodes = [repo[r].node() for r in revs]
947 common, any, hds = setdiscovery.findcommonheads(
947 common, any, hds = setdiscovery.findcommonheads(
948 ui, repo, remote, ancestorsof=nodes
948 ui, repo, remote, ancestorsof=nodes
949 )
949 )
950 return common, hds
950 return common, hds
951
951
952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
953 localrevs = opts[b'rev']
953 localrevs = opts[b'rev']
954 with util.timedcm('debug-discovery') as t:
954 with util.timedcm('debug-discovery') as t:
955 common, hds = doit(localrevs, remoterevs)
955 common, hds = doit(localrevs, remoterevs)
956
956
957 # compute all statistics
957 # compute all statistics
958 common = set(common)
958 common = set(common)
959 rheads = set(hds)
959 rheads = set(hds)
960 lheads = set(repo.heads())
960 lheads = set(repo.heads())
961
961
962 data = {}
962 data = {}
963 data[b'elapsed'] = t.elapsed
963 data[b'elapsed'] = t.elapsed
964 data[b'nb-common'] = len(common)
964 data[b'nb-common'] = len(common)
965 data[b'nb-common-local'] = len(common & lheads)
965 data[b'nb-common-local'] = len(common & lheads)
966 data[b'nb-common-remote'] = len(common & rheads)
966 data[b'nb-common-remote'] = len(common & rheads)
967 data[b'nb-common-both'] = len(common & rheads & lheads)
967 data[b'nb-common-both'] = len(common & rheads & lheads)
968 data[b'nb-local'] = len(lheads)
968 data[b'nb-local'] = len(lheads)
969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
970 data[b'nb-remote'] = len(rheads)
970 data[b'nb-remote'] = len(rheads)
971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
972 data[b'nb-revs'] = len(repo.revs(b'all()'))
972 data[b'nb-revs'] = len(repo.revs(b'all()'))
973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
975
975
976 # display discovery summary
976 # display discovery summary
977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
978 ui.writenoi18n(b"heads summary:\n")
978 ui.writenoi18n(b"heads summary:\n")
979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
992
992
993 if ui.verbose:
993 if ui.verbose:
994 ui.writenoi18n(
994 ui.writenoi18n(
995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
996 )
996 )
997
997
998
998
999 _chunksize = 4 << 10
999 _chunksize = 4 << 10
1000
1000
1001
1001
1002 @command(
1002 @command(
1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1004 )
1004 )
1005 def debugdownload(ui, repo, url, output=None, **opts):
1005 def debugdownload(ui, repo, url, output=None, **opts):
1006 """download a resource using Mercurial logic and config
1006 """download a resource using Mercurial logic and config
1007 """
1007 """
1008 fh = urlmod.open(ui, url, output)
1008 fh = urlmod.open(ui, url, output)
1009
1009
1010 dest = ui
1010 dest = ui
1011 if output:
1011 if output:
1012 dest = open(output, b"wb", _chunksize)
1012 dest = open(output, b"wb", _chunksize)
1013 try:
1013 try:
1014 data = fh.read(_chunksize)
1014 data = fh.read(_chunksize)
1015 while data:
1015 while data:
1016 dest.write(data)
1016 dest.write(data)
1017 data = fh.read(_chunksize)
1017 data = fh.read(_chunksize)
1018 finally:
1018 finally:
1019 if output:
1019 if output:
1020 dest.close()
1020 dest.close()
1021
1021
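# Example invocation (hypothetical URL and output path, for illustration only):
#   $ hg debugdownload https://www.mercurial-scm.org/ -o /tmp/page.html
# The resource is streamed to the output in _chunksize (4 KiB) blocks.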
1022
1022
1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1024 def debugextensions(ui, repo, **opts):
1024 def debugextensions(ui, repo, **opts):
1025 '''show information about active extensions'''
1025 '''show information about active extensions'''
1026 opts = pycompat.byteskwargs(opts)
1026 opts = pycompat.byteskwargs(opts)
1027 exts = extensions.extensions(ui)
1027 exts = extensions.extensions(ui)
1028 hgver = util.version()
1028 hgver = util.version()
1029 fm = ui.formatter(b'debugextensions', opts)
1029 fm = ui.formatter(b'debugextensions', opts)
1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1031 isinternal = extensions.ismoduleinternal(extmod)
1031 isinternal = extensions.ismoduleinternal(extmod)
1032 extsource = pycompat.fsencode(extmod.__file__)
1032 extsource = pycompat.fsencode(extmod.__file__)
1033 if isinternal:
1033 if isinternal:
1034 exttestedwith = [] # never expose magic string to users
1034 exttestedwith = [] # never expose magic string to users
1035 else:
1035 else:
1036 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1036 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1037 extbuglink = getattr(extmod, 'buglink', None)
1037 extbuglink = getattr(extmod, 'buglink', None)
1038
1038
1039 fm.startitem()
1039 fm.startitem()
1040
1040
1041 if ui.quiet or ui.verbose:
1041 if ui.quiet or ui.verbose:
1042 fm.write(b'name', b'%s\n', extname)
1042 fm.write(b'name', b'%s\n', extname)
1043 else:
1043 else:
1044 fm.write(b'name', b'%s', extname)
1044 fm.write(b'name', b'%s', extname)
1045 if isinternal or hgver in exttestedwith:
1045 if isinternal or hgver in exttestedwith:
1046 fm.plain(b'\n')
1046 fm.plain(b'\n')
1047 elif not exttestedwith:
1047 elif not exttestedwith:
1048 fm.plain(_(b' (untested!)\n'))
1048 fm.plain(_(b' (untested!)\n'))
1049 else:
1049 else:
1050 lasttestedversion = exttestedwith[-1]
1050 lasttestedversion = exttestedwith[-1]
1051 fm.plain(b' (%s!)\n' % lasttestedversion)
1051 fm.plain(b' (%s!)\n' % lasttestedversion)
1052
1052
1053 fm.condwrite(
1053 fm.condwrite(
1054 ui.verbose and extsource,
1054 ui.verbose and extsource,
1055 b'source',
1055 b'source',
1056 _(b' location: %s\n'),
1056 _(b' location: %s\n'),
1057 extsource or b"",
1057 extsource or b"",
1058 )
1058 )
1059
1059
1060 if ui.verbose:
1060 if ui.verbose:
1061 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1061 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1062 fm.data(bundled=isinternal)
1062 fm.data(bundled=isinternal)
1063
1063
1064 fm.condwrite(
1064 fm.condwrite(
1065 ui.verbose and exttestedwith,
1065 ui.verbose and exttestedwith,
1066 b'testedwith',
1066 b'testedwith',
1067 _(b' tested with: %s\n'),
1067 _(b' tested with: %s\n'),
1068 fm.formatlist(exttestedwith, name=b'ver'),
1068 fm.formatlist(exttestedwith, name=b'ver'),
1069 )
1069 )
1070
1070
1071 fm.condwrite(
1071 fm.condwrite(
1072 ui.verbose and extbuglink,
1072 ui.verbose and extbuglink,
1073 b'buglink',
1073 b'buglink',
1074 _(b' bug reporting: %s\n'),
1074 _(b' bug reporting: %s\n'),
1075 extbuglink or b"",
1075 extbuglink or b"",
1076 )
1076 )
1077
1077
1078 fm.end()
1078 fm.end()
1079
1079
1080
1080
1081 @command(
1081 @command(
1082 b'debugfileset',
1082 b'debugfileset',
1083 [
1083 [
1084 (
1084 (
1085 b'r',
1085 b'r',
1086 b'rev',
1086 b'rev',
1087 b'',
1087 b'',
1088 _(b'apply the filespec on this revision'),
1088 _(b'apply the filespec on this revision'),
1089 _(b'REV'),
1089 _(b'REV'),
1090 ),
1090 ),
1091 (
1091 (
1092 b'',
1092 b'',
1093 b'all-files',
1093 b'all-files',
1094 False,
1094 False,
1095 _(b'test files from all revisions and working directory'),
1095 _(b'test files from all revisions and working directory'),
1096 ),
1096 ),
1097 (
1097 (
1098 b's',
1098 b's',
1099 b'show-matcher',
1099 b'show-matcher',
1100 None,
1100 None,
1101 _(b'print internal representation of matcher'),
1101 _(b'print internal representation of matcher'),
1102 ),
1102 ),
1103 (
1103 (
1104 b'p',
1104 b'p',
1105 b'show-stage',
1105 b'show-stage',
1106 [],
1106 [],
1107 _(b'print parsed tree at the given stage'),
1107 _(b'print parsed tree at the given stage'),
1108 _(b'NAME'),
1108 _(b'NAME'),
1109 ),
1109 ),
1110 ],
1110 ],
1111 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1111 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1112 )
1112 )
1113 def debugfileset(ui, repo, expr, **opts):
1113 def debugfileset(ui, repo, expr, **opts):
1114 '''parse and apply a fileset specification'''
1114 '''parse and apply a fileset specification'''
1115 from . import fileset
1115 from . import fileset
1116
1116
1117 fileset.symbols # force import of fileset so we have predicates to optimize
1117 fileset.symbols # force import of fileset so we have predicates to optimize
1118 opts = pycompat.byteskwargs(opts)
1118 opts = pycompat.byteskwargs(opts)
1119 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1119 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1120
1120
1121 stages = [
1121 stages = [
1122 (b'parsed', pycompat.identity),
1122 (b'parsed', pycompat.identity),
1123 (b'analyzed', filesetlang.analyze),
1123 (b'analyzed', filesetlang.analyze),
1124 (b'optimized', filesetlang.optimize),
1124 (b'optimized', filesetlang.optimize),
1125 ]
1125 ]
1126 stagenames = set(n for n, f in stages)
1126 stagenames = set(n for n, f in stages)
1127
1127
1128 showalways = set()
1128 showalways = set()
1129 if ui.verbose and not opts[b'show_stage']:
1129 if ui.verbose and not opts[b'show_stage']:
1130 # show parsed tree by --verbose (deprecated)
1130 # show parsed tree by --verbose (deprecated)
1131 showalways.add(b'parsed')
1131 showalways.add(b'parsed')
1132 if opts[b'show_stage'] == [b'all']:
1132 if opts[b'show_stage'] == [b'all']:
1133 showalways.update(stagenames)
1133 showalways.update(stagenames)
1134 else:
1134 else:
1135 for n in opts[b'show_stage']:
1135 for n in opts[b'show_stage']:
1136 if n not in stagenames:
1136 if n not in stagenames:
1137 raise error.Abort(_(b'invalid stage name: %s') % n)
1137 raise error.Abort(_(b'invalid stage name: %s') % n)
1138 showalways.update(opts[b'show_stage'])
1138 showalways.update(opts[b'show_stage'])
1139
1139
1140 tree = filesetlang.parse(expr)
1140 tree = filesetlang.parse(expr)
1141 for n, f in stages:
1141 for n, f in stages:
1142 tree = f(tree)
1142 tree = f(tree)
1143 if n in showalways:
1143 if n in showalways:
1144 if opts[b'show_stage'] or n != b'parsed':
1144 if opts[b'show_stage'] or n != b'parsed':
1145 ui.write(b"* %s:\n" % n)
1145 ui.write(b"* %s:\n" % n)
1146 ui.write(filesetlang.prettyformat(tree), b"\n")
1146 ui.write(filesetlang.prettyformat(tree), b"\n")
1147
1147
1148 files = set()
1148 files = set()
1149 if opts[b'all_files']:
1149 if opts[b'all_files']:
1150 for r in repo:
1150 for r in repo:
1151 c = repo[r]
1151 c = repo[r]
1152 files.update(c.files())
1152 files.update(c.files())
1153 files.update(c.substate)
1153 files.update(c.substate)
1154 if opts[b'all_files'] or ctx.rev() is None:
1154 if opts[b'all_files'] or ctx.rev() is None:
1155 wctx = repo[None]
1155 wctx = repo[None]
1156 files.update(
1156 files.update(
1157 repo.dirstate.walk(
1157 repo.dirstate.walk(
1158 scmutil.matchall(repo),
1158 scmutil.matchall(repo),
1159 subrepos=list(wctx.substate),
1159 subrepos=list(wctx.substate),
1160 unknown=True,
1160 unknown=True,
1161 ignored=True,
1161 ignored=True,
1162 )
1162 )
1163 )
1163 )
1164 files.update(wctx.substate)
1164 files.update(wctx.substate)
1165 else:
1165 else:
1166 files.update(ctx.files())
1166 files.update(ctx.files())
1167 files.update(ctx.substate)
1167 files.update(ctx.substate)
1168
1168
1169 m = ctx.matchfileset(expr)
1169 m = ctx.matchfileset(expr)
1170 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1170 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1171 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1171 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1172 for f in sorted(files):
1172 for f in sorted(files):
1173 if not m(f):
1173 if not m(f):
1174 continue
1174 continue
1175 ui.write(b"%s\n" % f)
1175 ui.write(b"%s\n" % f)
1176
1176
1177
1177
1178 @command(b'debugformat', [] + cmdutil.formatteropts)
1178 @command(b'debugformat', [] + cmdutil.formatteropts)
1179 def debugformat(ui, repo, **opts):
1179 def debugformat(ui, repo, **opts):
1180 """display format information about the current repository
1180 """display format information about the current repository
1181
1181
1182 Use --verbose to get extra information about the current config value and
1182 Use --verbose to get extra information about the current config value and
1183 the Mercurial default."""
1183 the Mercurial default."""
1184 opts = pycompat.byteskwargs(opts)
1184 opts = pycompat.byteskwargs(opts)
1185 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1185 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1186 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1186 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1187
1187
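# Return a format string that pads '<name>:' with spaces so every
# format-variant label lines up in the table printed below.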
1188 def makeformatname(name):
1188 def makeformatname(name):
1189 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1189 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1190
1190
1191 fm = ui.formatter(b'debugformat', opts)
1191 fm = ui.formatter(b'debugformat', opts)
1192 if fm.isplain():
1192 if fm.isplain():
1193
1193
1194 def formatvalue(value):
1194 def formatvalue(value):
1195 if util.safehasattr(value, b'startswith'):
1195 if util.safehasattr(value, b'startswith'):
1196 return value
1196 return value
1197 if value:
1197 if value:
1198 return b'yes'
1198 return b'yes'
1199 else:
1199 else:
1200 return b'no'
1200 return b'no'
1201
1201
1202 else:
1202 else:
1203 formatvalue = pycompat.identity
1203 formatvalue = pycompat.identity
1204
1204
1205 fm.plain(b'format-variant')
1205 fm.plain(b'format-variant')
1206 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1206 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1207 fm.plain(b' repo')
1207 fm.plain(b' repo')
1208 if ui.verbose:
1208 if ui.verbose:
1209 fm.plain(b' config default')
1209 fm.plain(b' config default')
1210 fm.plain(b'\n')
1210 fm.plain(b'\n')
1211 for fv in upgrade.allformatvariant:
1211 for fv in upgrade.allformatvariant:
1212 fm.startitem()
1212 fm.startitem()
1213 repovalue = fv.fromrepo(repo)
1213 repovalue = fv.fromrepo(repo)
1214 configvalue = fv.fromconfig(repo)
1214 configvalue = fv.fromconfig(repo)
1215
1215
1216 if repovalue != configvalue:
1216 if repovalue != configvalue:
1217 namelabel = b'formatvariant.name.mismatchconfig'
1217 namelabel = b'formatvariant.name.mismatchconfig'
1218 repolabel = b'formatvariant.repo.mismatchconfig'
1218 repolabel = b'formatvariant.repo.mismatchconfig'
1219 elif repovalue != fv.default:
1219 elif repovalue != fv.default:
1220 namelabel = b'formatvariant.name.mismatchdefault'
1220 namelabel = b'formatvariant.name.mismatchdefault'
1221 repolabel = b'formatvariant.repo.mismatchdefault'
1221 repolabel = b'formatvariant.repo.mismatchdefault'
1222 else:
1222 else:
1223 namelabel = b'formatvariant.name.uptodate'
1223 namelabel = b'formatvariant.name.uptodate'
1224 repolabel = b'formatvariant.repo.uptodate'
1224 repolabel = b'formatvariant.repo.uptodate'
1225
1225
1226 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1226 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1227 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1227 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1228 if fv.default != configvalue:
1228 if fv.default != configvalue:
1229 configlabel = b'formatvariant.config.special'
1229 configlabel = b'formatvariant.config.special'
1230 else:
1230 else:
1231 configlabel = b'formatvariant.config.default'
1231 configlabel = b'formatvariant.config.default'
1232 fm.condwrite(
1232 fm.condwrite(
1233 ui.verbose,
1233 ui.verbose,
1234 b'config',
1234 b'config',
1235 b' %6s',
1235 b' %6s',
1236 formatvalue(configvalue),
1236 formatvalue(configvalue),
1237 label=configlabel,
1237 label=configlabel,
1238 )
1238 )
1239 fm.condwrite(
1239 fm.condwrite(
1240 ui.verbose,
1240 ui.verbose,
1241 b'default',
1241 b'default',
1242 b' %7s',
1242 b' %7s',
1243 formatvalue(fv.default),
1243 formatvalue(fv.default),
1244 label=b'formatvariant.default',
1244 label=b'formatvariant.default',
1245 )
1245 )
1246 fm.plain(b'\n')
1246 fm.plain(b'\n')
1247 fm.end()
1247 fm.end()
1248
1248
1249
1249
1250 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1250 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1251 def debugfsinfo(ui, path=b"."):
1251 def debugfsinfo(ui, path=b"."):
1252 """show information detected about current filesystem"""
1252 """show information detected about current filesystem"""
1253 ui.writenoi18n(b'path: %s\n' % path)
1253 ui.writenoi18n(b'path: %s\n' % path)
1254 ui.writenoi18n(
1254 ui.writenoi18n(
1255 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1255 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1256 )
1256 )
1257 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1257 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1258 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1258 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1259 ui.writenoi18n(
1259 ui.writenoi18n(
1260 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1260 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1261 )
1261 )
1262 ui.writenoi18n(
1262 ui.writenoi18n(
1263 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1263 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1264 )
1264 )
1265 casesensitive = b'(unknown)'
1265 casesensitive = b'(unknown)'
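# Probe case-sensitivity with a throwaway temporary file; leave '(unknown)'
# if the probe file cannot be created.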
1266 try:
1266 try:
1267 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1267 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1268 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1268 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1269 except OSError:
1269 except OSError:
1270 pass
1270 pass
1271 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1271 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1272
1272
1273
1273
1274 @command(
1274 @command(
1275 b'debuggetbundle',
1275 b'debuggetbundle',
1276 [
1276 [
1277 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1277 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1278 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1278 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1279 (
1279 (
1280 b't',
1280 b't',
1281 b'type',
1281 b'type',
1282 b'bzip2',
1282 b'bzip2',
1283 _(b'bundle compression type to use'),
1283 _(b'bundle compression type to use'),
1284 _(b'TYPE'),
1284 _(b'TYPE'),
1285 ),
1285 ),
1286 ],
1286 ],
1287 _(b'REPO FILE [-H|-C ID]...'),
1287 _(b'REPO FILE [-H|-C ID]...'),
1288 norepo=True,
1288 norepo=True,
1289 )
1289 )
1290 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1290 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1291 """retrieves a bundle from a repo
1291 """retrieves a bundle from a repo
1292
1292
1293 Every ID must be a full-length hex node id string. Saves the bundle to the
1293 Every ID must be a full-length hex node id string. Saves the bundle to the
1294 given file.
1294 given file.
1295 """
1295 """
1296 opts = pycompat.byteskwargs(opts)
1296 opts = pycompat.byteskwargs(opts)
1297 repo = hg.peer(ui, opts, repopath)
1297 repo = hg.peer(ui, opts, repopath)
1298 if not repo.capable(b'getbundle'):
1298 if not repo.capable(b'getbundle'):
1299 raise error.Abort(b"getbundle() not supported by target repository")
1299 raise error.Abort(b"getbundle() not supported by target repository")
1300 args = {}
1300 args = {}
1301 if common:
1301 if common:
1302 args['common'] = [bin(s) for s in common]
1302 args['common'] = [bin(s) for s in common]
1303 if head:
1303 if head:
1304 args['heads'] = [bin(s) for s in head]
1304 args['heads'] = [bin(s) for s in head]
1305 # TODO: get desired bundlecaps from command line.
1305 # TODO: get desired bundlecaps from command line.
1306 args['bundlecaps'] = None
1306 args['bundlecaps'] = None
1307 bundle = repo.getbundle(b'debug', **args)
1307 bundle = repo.getbundle(b'debug', **args)
1308
1308
1309 bundletype = opts.get(b'type', b'bzip2').lower()
1309 bundletype = opts.get(b'type', b'bzip2').lower()
1310 btypes = {
1310 btypes = {
1311 b'none': b'HG10UN',
1311 b'none': b'HG10UN',
1312 b'bzip2': b'HG10BZ',
1312 b'bzip2': b'HG10BZ',
1313 b'gzip': b'HG10GZ',
1313 b'gzip': b'HG10GZ',
1314 b'bundle2': b'HG20',
1314 b'bundle2': b'HG20',
1315 }
1315 }
1316 bundletype = btypes.get(bundletype)
1316 bundletype = btypes.get(bundletype)
1317 if bundletype not in bundle2.bundletypes:
1317 if bundletype not in bundle2.bundletypes:
1318 raise error.Abort(_(b'unknown bundle type specified with --type'))
1318 raise error.Abort(_(b'unknown bundle type specified with --type'))
1319 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1319 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1320
1320
1321
1321
1322 @command(b'debugignore', [], b'[FILE]')
1322 @command(b'debugignore', [], b'[FILE]')
1323 def debugignore(ui, repo, *files, **opts):
1323 def debugignore(ui, repo, *files, **opts):
1324 """display the combined ignore pattern and information about ignored files
1324 """display the combined ignore pattern and information about ignored files
1325
1325
1326 With no argument display the combined ignore pattern.
1326 With no argument display the combined ignore pattern.
1327
1327
1328 Given space separated file names, shows if the given file is ignored and
1328 Given space separated file names, shows if the given file is ignored and
1329 if so, shows the ignore rule (file and line number) that matched it.
1329 if so, shows the ignore rule (file and line number) that matched it.
1330 """
1330 """
1331 ignore = repo.dirstate._ignore
1331 ignore = repo.dirstate._ignore
1332 if not files:
1332 if not files:
1333 # Show all the patterns
1333 # Show all the patterns
1334 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1334 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1335 else:
1335 else:
1336 m = scmutil.match(repo[None], pats=files)
1336 m = scmutil.match(repo[None], pats=files)
1337 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1337 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1338 for f in m.files():
1338 for f in m.files():
1339 nf = util.normpath(f)
1339 nf = util.normpath(f)
1340 ignored = None
1340 ignored = None
1341 ignoredata = None
1341 ignoredata = None
1342 if nf != b'.':
1342 if nf != b'.':
1343 if ignore(nf):
1343 if ignore(nf):
1344 ignored = nf
1344 ignored = nf
1345 ignoredata = repo.dirstate._ignorefileandline(nf)
1345 ignoredata = repo.dirstate._ignorefileandline(nf)
1346 else:
1346 else:
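# The file itself is not ignored; check whether one of its parent
# directories is, since that also makes the file ignored.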
1347 for p in pathutil.finddirs(nf):
1347 for p in pathutil.finddirs(nf):
1348 if ignore(p):
1348 if ignore(p):
1349 ignored = p
1349 ignored = p
1350 ignoredata = repo.dirstate._ignorefileandline(p)
1350 ignoredata = repo.dirstate._ignorefileandline(p)
1351 break
1351 break
1352 if ignored:
1352 if ignored:
1353 if ignored == nf:
1353 if ignored == nf:
1354 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1354 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1355 else:
1355 else:
1356 ui.write(
1356 ui.write(
1357 _(
1357 _(
1358 b"%s is ignored because of "
1358 b"%s is ignored because of "
1359 b"containing directory %s\n"
1359 b"containing directory %s\n"
1360 )
1360 )
1361 % (uipathfn(f), ignored)
1361 % (uipathfn(f), ignored)
1362 )
1362 )
1363 ignorefile, lineno, line = ignoredata
1363 ignorefile, lineno, line = ignoredata
1364 ui.write(
1364 ui.write(
1365 _(b"(ignore rule in %s, line %d: '%s')\n")
1365 _(b"(ignore rule in %s, line %d: '%s')\n")
1366 % (ignorefile, lineno, line)
1366 % (ignorefile, lineno, line)
1367 )
1367 )
1368 else:
1368 else:
1369 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1369 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1370
1370
1371
1371
1372 @command(
1372 @command(
1373 b'debugindex',
1373 b'debugindex',
1374 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1374 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1375 _(b'-c|-m|FILE'),
1375 _(b'-c|-m|FILE'),
1376 )
1376 )
1377 def debugindex(ui, repo, file_=None, **opts):
1377 def debugindex(ui, repo, file_=None, **opts):
1378 """dump index data for a storage primitive"""
1378 """dump index data for a storage primitive"""
1379 opts = pycompat.byteskwargs(opts)
1379 opts = pycompat.byteskwargs(opts)
1380 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1380 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1381
1381
1382 if ui.debugflag:
1382 if ui.debugflag:
1383 shortfn = hex
1383 shortfn = hex
1384 else:
1384 else:
1385 shortfn = short
1385 shortfn = short
1386
1386
1387 idlen = 12
1387 idlen = 12
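# Size the node columns from the first revision's id; keep the default of
# 12 (short-hash width) when the store is empty.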
1388 for i in store:
1388 for i in store:
1389 idlen = len(shortfn(store.node(i)))
1389 idlen = len(shortfn(store.node(i)))
1390 break
1390 break
1391
1391
1392 fm = ui.formatter(b'debugindex', opts)
1392 fm = ui.formatter(b'debugindex', opts)
1393 fm.plain(
1393 fm.plain(
1394 b' rev linkrev %s %s p2\n'
1394 b' rev linkrev %s %s p2\n'
1395 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1395 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1396 )
1396 )
1397
1397
1398 for rev in store:
1398 for rev in store:
1399 node = store.node(rev)
1399 node = store.node(rev)
1400 parents = store.parents(node)
1400 parents = store.parents(node)
1401
1401
1402 fm.startitem()
1402 fm.startitem()
1403 fm.write(b'rev', b'%6d ', rev)
1403 fm.write(b'rev', b'%6d ', rev)
1404 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1404 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1405 fm.write(b'node', b'%s ', shortfn(node))
1405 fm.write(b'node', b'%s ', shortfn(node))
1406 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1406 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1407 fm.write(b'p2', b'%s', shortfn(parents[1]))
1407 fm.write(b'p2', b'%s', shortfn(parents[1]))
1408 fm.plain(b'\n')
1408 fm.plain(b'\n')
1409
1409
1410 fm.end()
1410 fm.end()
1411
1411
1412
1412
1413 @command(
1413 @command(
1414 b'debugindexdot',
1414 b'debugindexdot',
1415 cmdutil.debugrevlogopts,
1415 cmdutil.debugrevlogopts,
1416 _(b'-c|-m|FILE'),
1416 _(b'-c|-m|FILE'),
1417 optionalrepo=True,
1417 optionalrepo=True,
1418 )
1418 )
1419 def debugindexdot(ui, repo, file_=None, **opts):
1419 def debugindexdot(ui, repo, file_=None, **opts):
1420 """dump an index DAG as a graphviz dot file"""
1420 """dump an index DAG as a graphviz dot file"""
1421 opts = pycompat.byteskwargs(opts)
1421 opts = pycompat.byteskwargs(opts)
1422 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1422 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1423 ui.writenoi18n(b"digraph G {\n")
1423 ui.writenoi18n(b"digraph G {\n")
1424 for i in r:
1424 for i in r:
1425 node = r.node(i)
1425 node = r.node(i)
1426 pp = r.parents(node)
1426 pp = r.parents(node)
1427 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1427 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1428 if pp[1] != nullid:
1428 if pp[1] != nullid:
1429 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1429 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1430 ui.write(b"}\n")
1430 ui.write(b"}\n")
1431
1431
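# Example (hypothetical pipeline, for illustration): render the emitted DAG
# with Graphviz, e.g. `hg debugindexdot -c | dot -Tsvg > dag.svg`.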
1432
1432
1433 @command(b'debugindexstats', [])
1433 @command(b'debugindexstats', [])
1434 def debugindexstats(ui, repo):
1434 def debugindexstats(ui, repo):
1435 """show stats related to the changelog index"""
1435 """show stats related to the changelog index"""
1436 repo.changelog.shortest(nullid, 1)
1436 repo.changelog.shortest(nullid, 1)
1437 index = repo.changelog.index
1437 index = repo.changelog.index
1438 if not util.safehasattr(index, b'stats'):
1438 if not util.safehasattr(index, b'stats'):
1439 raise error.Abort(_(b'debugindexstats only works with native code'))
1439 raise error.Abort(_(b'debugindexstats only works with native code'))
1440 for k, v in sorted(index.stats().items()):
1440 for k, v in sorted(index.stats().items()):
1441 ui.write(b'%s: %d\n' % (k, v))
1441 ui.write(b'%s: %d\n' % (k, v))
1442
1442
1443
1443
1444 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1444 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1445 def debuginstall(ui, **opts):
1445 def debuginstall(ui, **opts):
1446 '''test Mercurial installation
1446 '''test Mercurial installation
1447
1447
1448 Returns 0 on success.
1448 Returns 0 on success.
1449 '''
1449 '''
1450 opts = pycompat.byteskwargs(opts)
1450 opts = pycompat.byteskwargs(opts)
1451
1451
1452 problems = 0
1452 problems = 0
1453
1453
1454 fm = ui.formatter(b'debuginstall', opts)
1454 fm = ui.formatter(b'debuginstall', opts)
1455 fm.startitem()
1455 fm.startitem()
1456
1456
1457 # encoding
1457 # encoding
1458 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1458 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1459 err = None
1459 err = None
1460 try:
1460 try:
1461 codecs.lookup(pycompat.sysstr(encoding.encoding))
1461 codecs.lookup(pycompat.sysstr(encoding.encoding))
1462 except LookupError as inst:
1462 except LookupError as inst:
1463 err = stringutil.forcebytestr(inst)
1463 err = stringutil.forcebytestr(inst)
1464 problems += 1
1464 problems += 1
1465 fm.condwrite(
1465 fm.condwrite(
1466 err,
1466 err,
1467 b'encodingerror',
1467 b'encodingerror',
1468 _(b" %s\n (check that your locale is properly set)\n"),
1468 _(b" %s\n (check that your locale is properly set)\n"),
1469 err,
1469 err,
1470 )
1470 )
1471
1471
1472 # Python
1472 # Python
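# Frozen builds (e.g. PyOxidizer, which sets sys.oxidized) may load modules
# from memory, so os.__file__ can be missing; fall back to reporting the
# executable path instead of a library directory.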
1473 pythonlib = None
1474 if util.safehasattr(os, '__file__'):
1475 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1476 elif getattr(sys, 'oxidized', False):
1477 pythonlib = pycompat.sysexecutable
1478
1473 fm.write(
1479 fm.write(
1474 b'pythonexe',
1480 b'pythonexe',
1475 _(b"checking Python executable (%s)\n"),
1481 _(b"checking Python executable (%s)\n"),
1476 pycompat.sysexecutable or _(b"unknown"),
1482 pycompat.sysexecutable or _(b"unknown"),
1477 )
1483 )
1478 fm.write(
1484 fm.write(
1479 b'pythonver',
1485 b'pythonver',
1480 _(b"checking Python version (%s)\n"),
1486 _(b"checking Python version (%s)\n"),
1481 (b"%d.%d.%d" % sys.version_info[:3]),
1487 (b"%d.%d.%d" % sys.version_info[:3]),
1482 )
1488 )
1483 fm.write(
1489 fm.write(
1484 b'pythonlib',
1490 b'pythonlib',
1485 _(b"checking Python lib (%s)...\n"),
1491 _(b"checking Python lib (%s)...\n"),
1486 os.path.dirname(pycompat.fsencode(os.__file__)),
1492 pythonlib or _(b"unknown"),
1487 )
1493 )
1488
1494
1489 security = set(sslutil.supportedprotocols)
1495 security = set(sslutil.supportedprotocols)
1490 if sslutil.hassni:
1496 if sslutil.hassni:
1491 security.add(b'sni')
1497 security.add(b'sni')
1492
1498
1493 fm.write(
1499 fm.write(
1494 b'pythonsecurity',
1500 b'pythonsecurity',
1495 _(b"checking Python security support (%s)\n"),
1501 _(b"checking Python security support (%s)\n"),
1496 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1502 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1497 )
1503 )
1498
1504
1499 # These are warnings, not errors. So don't increment problem count. This
1505 # These are warnings, not errors. So don't increment problem count. This
1500 # may change in the future.
1506 # may change in the future.
1501 if b'tls1.2' not in security:
1507 if b'tls1.2' not in security:
1502 fm.plain(
1508 fm.plain(
1503 _(
1509 _(
1504 b' TLS 1.2 not supported by Python install; '
1510 b' TLS 1.2 not supported by Python install; '
1505 b'network connections lack modern security\n'
1511 b'network connections lack modern security\n'
1506 )
1512 )
1507 )
1513 )
1508 if b'sni' not in security:
1514 if b'sni' not in security:
1509 fm.plain(
1515 fm.plain(
1510 _(
1516 _(
1511 b' SNI not supported by Python install; may have '
1517 b' SNI not supported by Python install; may have '
1512 b'connectivity issues with some servers\n'
1518 b'connectivity issues with some servers\n'
1513 )
1519 )
1514 )
1520 )
1515
1521
1516 # TODO print CA cert info
1522 # TODO print CA cert info
1517
1523
1518 # hg version
1524 # hg version
1519 hgver = util.version()
1525 hgver = util.version()
1520 fm.write(
1526 fm.write(
1521 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1527 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1522 )
1528 )
1523 fm.write(
1529 fm.write(
1524 b'hgverextra',
1530 b'hgverextra',
1525 _(b"checking Mercurial custom build (%s)\n"),
1531 _(b"checking Mercurial custom build (%s)\n"),
1526 b'+'.join(hgver.split(b'+')[1:]),
1532 b'+'.join(hgver.split(b'+')[1:]),
1527 )
1533 )
1528
1534
1529 # compiled modules
1535 # compiled modules
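# As with the Python lib check above, __file__ may be missing on this module
# in a frozen/oxidized build; report the executable path in that case.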
1536 hgmodules = None
1537 if util.safehasattr(sys.modules[__name__], '__file__'):
1538 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1539 elif getattr(sys, 'oxidized', False):
1540 hgmodules = pycompat.sysexecutable
1541
1530 fm.write(
1542 fm.write(
1531 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1543 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1532 )
1544 )
1533 fm.write(
1545 fm.write(
1534 b'hgmodules',
1546 b'hgmodules',
1535 _(b"checking installed modules (%s)...\n"),
1547 _(b"checking installed modules (%s)...\n"),
1536 os.path.dirname(pycompat.fsencode(__file__)),
1548 hgmodules or _(b"unknown"),
1537 )
1549 )
1538
1550
1539 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1551 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1540 rustext = rustandc # for now, that's the only case
1552 rustext = rustandc # for now, that's the only case
1541 cext = policy.policy in (b'c', b'allow') or rustandc
1553 cext = policy.policy in (b'c', b'allow') or rustandc
1542 nopure = cext or rustext
1554 nopure = cext or rustext
1543 if nopure:
1555 if nopure:
1544 err = None
1556 err = None
1545 try:
1557 try:
1546 if cext:
1558 if cext:
1547 from .cext import (
1559 from .cext import (
1548 base85,
1560 base85,
1549 bdiff,
1561 bdiff,
1550 mpatch,
1562 mpatch,
1551 osutil,
1563 osutil,
1552 )
1564 )
1553
1565
1554 # quiet pyflakes
1566 # quiet pyflakes
1555 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1567 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1556 if rustext:
1568 if rustext:
1557 from .rustext import (
1569 from .rustext import (
1558 ancestor,
1570 ancestor,
1559 dirstate,
1571 dirstate,
1560 )
1572 )
1561
1573
1562 dir(ancestor), dir(dirstate) # quiet pyflakes
1574 dir(ancestor), dir(dirstate) # quiet pyflakes
1563 except Exception as inst:
1575 except Exception as inst:
1564 err = stringutil.forcebytestr(inst)
1576 err = stringutil.forcebytestr(inst)
1565 problems += 1
1577 problems += 1
1566 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1578 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1567
1579
1568 compengines = util.compengines._engines.values()
1580 compengines = util.compengines._engines.values()
1569 fm.write(
1581 fm.write(
1570 b'compengines',
1582 b'compengines',
1571 _(b'checking registered compression engines (%s)\n'),
1583 _(b'checking registered compression engines (%s)\n'),
1572 fm.formatlist(
1584 fm.formatlist(
1573 sorted(e.name() for e in compengines),
1585 sorted(e.name() for e in compengines),
1574 name=b'compengine',
1586 name=b'compengine',
1575 fmt=b'%s',
1587 fmt=b'%s',
1576 sep=b', ',
1588 sep=b', ',
1577 ),
1589 ),
1578 )
1590 )
1579 fm.write(
1591 fm.write(
1580 b'compenginesavail',
1592 b'compenginesavail',
1581 _(b'checking available compression engines (%s)\n'),
1593 _(b'checking available compression engines (%s)\n'),
1582 fm.formatlist(
1594 fm.formatlist(
1583 sorted(e.name() for e in compengines if e.available()),
1595 sorted(e.name() for e in compengines if e.available()),
1584 name=b'compengine',
1596 name=b'compengine',
1585 fmt=b'%s',
1597 fmt=b'%s',
1586 sep=b', ',
1598 sep=b', ',
1587 ),
1599 ),
1588 )
1600 )
1589 wirecompengines = compression.compengines.supportedwireengines(
1601 wirecompengines = compression.compengines.supportedwireengines(
1590 compression.SERVERROLE
1602 compression.SERVERROLE
1591 )
1603 )
1592 fm.write(
1604 fm.write(
1593 b'compenginesserver',
1605 b'compenginesserver',
1594 _(
1606 _(
1595 b'checking available compression engines '
1607 b'checking available compression engines '
1596 b'for wire protocol (%s)\n'
1608 b'for wire protocol (%s)\n'
1597 ),
1609 ),
1598 fm.formatlist(
1610 fm.formatlist(
1599 [e.name() for e in wirecompengines if e.wireprotosupport()],
1611 [e.name() for e in wirecompengines if e.wireprotosupport()],
1600 name=b'compengine',
1612 name=b'compengine',
1601 fmt=b'%s',
1613 fmt=b'%s',
1602 sep=b', ',
1614 sep=b', ',
1603 ),
1615 ),
1604 )
1616 )
1605 re2 = b'missing'
1617 re2 = b'missing'
1606 if util._re2:
1618 if util._re2:
1607 re2 = b'available'
1619 re2 = b'available'
1608 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1620 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1609 fm.data(re2=bool(util._re2))
1621 fm.data(re2=bool(util._re2))
1610
1622
1611 # templates
1623 # templates
1612 p = templater.templatepaths()
1624 p = templater.templatepaths()
1613 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1625 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1614 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1626 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1615 if p:
1627 if p:
1616 m = templater.templatepath(b"map-cmdline.default")
1628 m = templater.templatepath(b"map-cmdline.default")
1617 if m:
1629 if m:
1618 # template found, check if it is working
1630 # template found, check if it is working
1619 err = None
1631 err = None
1620 try:
1632 try:
1621 templater.templater.frommapfile(m)
1633 templater.templater.frommapfile(m)
1622 except Exception as inst:
1634 except Exception as inst:
1623 err = stringutil.forcebytestr(inst)
1635 err = stringutil.forcebytestr(inst)
1624 p = None
1636 p = None
1625 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1637 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1626 else:
1638 else:
1627 p = None
1639 p = None
1628 fm.condwrite(
1640 fm.condwrite(
1629 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1641 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1630 )
1642 )
1631 fm.condwrite(
1643 fm.condwrite(
1632 not m,
1644 not m,
1633 b'defaulttemplatenotfound',
1645 b'defaulttemplatenotfound',
1634 _(b" template '%s' not found\n"),
1646 _(b" template '%s' not found\n"),
1635 b"default",
1647 b"default",
1636 )
1648 )
1637 if not p:
1649 if not p:
1638 problems += 1
1650 problems += 1
1639 fm.condwrite(
1651 fm.condwrite(
1640 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1652 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1641 )
1653 )
1642
1654
1643 # editor
1655 # editor
1644 editor = ui.geteditor()
1656 editor = ui.geteditor()
1645 editor = util.expandpath(editor)
1657 editor = util.expandpath(editor)
1646 editorbin = procutil.shellsplit(editor)[0]
1658 editorbin = procutil.shellsplit(editor)[0]
1647 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1659 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1648 cmdpath = procutil.findexe(editorbin)
1660 cmdpath = procutil.findexe(editorbin)
1649 fm.condwrite(
1661 fm.condwrite(
1650 not cmdpath and editor == b'vi',
1662 not cmdpath and editor == b'vi',
1651 b'vinotfound',
1663 b'vinotfound',
1652 _(
1664 _(
1653 b" No commit editor set and can't find %s in PATH\n"
1665 b" No commit editor set and can't find %s in PATH\n"
1654 b" (specify a commit editor in your configuration"
1666 b" (specify a commit editor in your configuration"
1655 b" file)\n"
1667 b" file)\n"
1656 ),
1668 ),
1657 not cmdpath and editor == b'vi' and editorbin,
1669 not cmdpath and editor == b'vi' and editorbin,
1658 )
1670 )
1659 fm.condwrite(
1671 fm.condwrite(
1660 not cmdpath and editor != b'vi',
1672 not cmdpath and editor != b'vi',
1661 b'editornotfound',
1673 b'editornotfound',
1662 _(
1674 _(
1663 b" Can't find editor '%s' in PATH\n"
1675 b" Can't find editor '%s' in PATH\n"
1664 b" (specify a commit editor in your configuration"
1676 b" (specify a commit editor in your configuration"
1665 b" file)\n"
1677 b" file)\n"
1666 ),
1678 ),
1667 not cmdpath and editorbin,
1679 not cmdpath and editorbin,
1668 )
1680 )
1669 if not cmdpath and editor != b'vi':
1681 if not cmdpath and editor != b'vi':
1670 problems += 1
1682 problems += 1
1671
1683
1672 # check username
1684 # check username
1673 username = None
1685 username = None
1674 err = None
1686 err = None
1675 try:
1687 try:
1676 username = ui.username()
1688 username = ui.username()
1677 except error.Abort as e:
1689 except error.Abort as e:
1678 err = stringutil.forcebytestr(e)
1690 err = stringutil.forcebytestr(e)
1679 problems += 1
1691 problems += 1
1680
1692
1681 fm.condwrite(
1693 fm.condwrite(
1682 username, b'username', _(b"checking username (%s)\n"), username
1694 username, b'username', _(b"checking username (%s)\n"), username
1683 )
1695 )
1684 fm.condwrite(
1696 fm.condwrite(
1685 err,
1697 err,
1686 b'usernameerror',
1698 b'usernameerror',
1687 _(
1699 _(
1688 b"checking username...\n %s\n"
1700 b"checking username...\n %s\n"
1689 b" (specify a username in your configuration file)\n"
1701 b" (specify a username in your configuration file)\n"
1690 ),
1702 ),
1691 err,
1703 err,
1692 )
1704 )
1693
1705
1694 for name, mod in extensions.extensions():
1706 for name, mod in extensions.extensions():
1695 handler = getattr(mod, 'debuginstall', None)
1707 handler = getattr(mod, 'debuginstall', None)
1696 if handler is not None:
1708 if handler is not None:
1697 problems += handler(ui, fm)
1709 problems += handler(ui, fm)
1698
1710
1699 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1711 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1700 if not problems:
1712 if not problems:
1701 fm.data(problems=problems)
1713 fm.data(problems=problems)
1702 fm.condwrite(
1714 fm.condwrite(
1703 problems,
1715 problems,
1704 b'problems',
1716 b'problems',
1705 _(b"%d problems detected, please check your install!\n"),
1717 _(b"%d problems detected, please check your install!\n"),
1706 problems,
1718 problems,
1707 )
1719 )
1708 fm.end()
1720 fm.end()
1709
1721
1710 return problems
1722 return problems
1711
1723
1712
1724
1713 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1725 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1714 def debugknown(ui, repopath, *ids, **opts):
1726 def debugknown(ui, repopath, *ids, **opts):
1715 """test whether node ids are known to a repo
1727 """test whether node ids are known to a repo
1716
1728
1717 Every ID must be a full-length hex node id string. Returns a list of 0s
1729 Every ID must be a full-length hex node id string. Returns a list of 0s
1718 and 1s indicating unknown/known.
1730 and 1s indicating unknown/known.
1719 """
1731 """
1720 opts = pycompat.byteskwargs(opts)
1732 opts = pycompat.byteskwargs(opts)
1721 repo = hg.peer(ui, opts, repopath)
1733 repo = hg.peer(ui, opts, repopath)
1722 if not repo.capable(b'known'):
1734 if not repo.capable(b'known'):
1723 raise error.Abort(b"known() not supported by target repository")
1735 raise error.Abort(b"known() not supported by target repository")
1724 flags = repo.known([bin(s) for s in ids])
1736 flags = repo.known([bin(s) for s in ids])
1725 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1737 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1726
1738
1727
1739
1728 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1740 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1729 def debuglabelcomplete(ui, repo, *args):
1741 def debuglabelcomplete(ui, repo, *args):
1730 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1742 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1731 debugnamecomplete(ui, repo, *args)
1743 debugnamecomplete(ui, repo, *args)
1732
1744
1733
1745
1734 @command(
1746 @command(
1735 b'debuglocks',
1747 b'debuglocks',
1736 [
1748 [
1737 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1749 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1738 (
1750 (
1739 b'W',
1751 b'W',
1740 b'force-wlock',
1752 b'force-wlock',
1741 None,
1753 None,
1742 _(b'free the working state lock (DANGEROUS)'),
1754 _(b'free the working state lock (DANGEROUS)'),
1743 ),
1755 ),
1744 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1756 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1745 (
1757 (
1746 b'S',
1758 b'S',
1747 b'set-wlock',
1759 b'set-wlock',
1748 None,
1760 None,
1749 _(b'set the working state lock until stopped'),
1761 _(b'set the working state lock until stopped'),
1750 ),
1762 ),
1751 ],
1763 ],
1752 _(b'[OPTION]...'),
1764 _(b'[OPTION]...'),
1753 )
1765 )
1754 def debuglocks(ui, repo, **opts):
1766 def debuglocks(ui, repo, **opts):
1755 """show or modify state of locks
1767 """show or modify state of locks
1756
1768
1757 By default, this command will show which locks are held. This
1769 By default, this command will show which locks are held. This
1758 includes the user and process holding the lock, the amount of time
1770 includes the user and process holding the lock, the amount of time
1759 the lock has been held, and the machine name where the process is
1771 the lock has been held, and the machine name where the process is
1760 running if it's not local.
1772 running if it's not local.
1761
1773
1762 Locks protect the integrity of Mercurial's data, so they should be
1774 Locks protect the integrity of Mercurial's data, so they should be
1763 treated with care. System crashes or other interruptions may cause
1775 treated with care. System crashes or other interruptions may cause
1764 locks to not be properly released, though Mercurial will usually
1776 locks to not be properly released, though Mercurial will usually
1765 detect and remove such stale locks automatically.
1777 detect and remove such stale locks automatically.
1766
1778
1767 However, detecting stale locks may not always be possible (for
1779 However, detecting stale locks may not always be possible (for
1768 instance, on a shared filesystem). Removing locks may also be
1780 instance, on a shared filesystem). Removing locks may also be
1769 blocked by filesystem permissions.
1781 blocked by filesystem permissions.
1770
1782
1771 Setting a lock will prevent other commands from changing the data.
1783 Setting a lock will prevent other commands from changing the data.
1772 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1784 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1773 The set locks are removed when the command exits.
1785 The set locks are removed when the command exits.
1774
1786
1775 Returns 0 if no locks are held.
1787 Returns 0 if no locks are held.
1776
1788
1777 """
1789 """
1778
1790
1779 if opts.get('force_lock'):
1791 if opts.get('force_lock'):
1780 repo.svfs.unlink(b'lock')
1792 repo.svfs.unlink(b'lock')
1781 if opts.get('force_wlock'):
1793 if opts.get('force_wlock'):
1782 repo.vfs.unlink(b'wlock')
1794 repo.vfs.unlink(b'wlock')
1783 if opts.get('force_lock') or opts.get('force_wlock'):
1795 if opts.get('force_lock') or opts.get('force_wlock'):
1784 return 0
1796 return 0
1785
1797
1786 locks = []
1798 locks = []
1787 try:
1799 try:
1788 if opts.get('set_wlock'):
1800 if opts.get('set_wlock'):
1789 try:
1801 try:
1790 locks.append(repo.wlock(False))
1802 locks.append(repo.wlock(False))
1791 except error.LockHeld:
1803 except error.LockHeld:
1792 raise error.Abort(_(b'wlock is already held'))
1804 raise error.Abort(_(b'wlock is already held'))
1793 if opts.get('set_lock'):
1805 if opts.get('set_lock'):
1794 try:
1806 try:
1795 locks.append(repo.lock(False))
1807 locks.append(repo.lock(False))
1796 except error.LockHeld:
1808 except error.LockHeld:
1797 raise error.Abort(_(b'lock is already held'))
1809 raise error.Abort(_(b'lock is already held'))
1798 if len(locks):
1810 if len(locks):
1799 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1811 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1800 return 0
1812 return 0
1801 finally:
1813 finally:
1802 release(*locks)
1814 release(*locks)
1803
1815
1804 now = time.time()
1816 now = time.time()
1805 held = 0
1817 held = 0
1806
1818
1807 def report(vfs, name, method):
1819 def report(vfs, name, method):
1808 # this causes stale locks to get reaped for more accurate reporting
1820 # this causes stale locks to get reaped for more accurate reporting
1809 try:
1821 try:
1810 l = method(False)
1822 l = method(False)
1811 except error.LockHeld:
1823 except error.LockHeld:
1812 l = None
1824 l = None
1813
1825
1814 if l:
1826 if l:
1815 l.release()
1827 l.release()
1816 else:
1828 else:
1817 try:
1829 try:
1818 st = vfs.lstat(name)
1830 st = vfs.lstat(name)
1819 age = now - st[stat.ST_MTIME]
1831 age = now - st[stat.ST_MTIME]
1820 user = util.username(st.st_uid)
1832 user = util.username(st.st_uid)
1821 locker = vfs.readlock(name)
1833 locker = vfs.readlock(name)
1822 if b":" in locker:
1834 if b":" in locker:
1823 host, pid = locker.split(b':')
1835 host, pid = locker.split(b':')
1824 if host == socket.gethostname():
1836 if host == socket.gethostname():
1825 locker = b'user %s, process %s' % (user or b'None', pid)
1837 locker = b'user %s, process %s' % (user or b'None', pid)
1826 else:
1838 else:
1827 locker = b'user %s, process %s, host %s' % (
1839 locker = b'user %s, process %s, host %s' % (
1828 user or b'None',
1840 user or b'None',
1829 pid,
1841 pid,
1830 host,
1842 host,
1831 )
1843 )
1832 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1844 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1833 return 1
1845 return 1
1834 except OSError as e:
1846 except OSError as e:
1835 if e.errno != errno.ENOENT:
1847 if e.errno != errno.ENOENT:
1836 raise
1848 raise
1837
1849
1838 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1850 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1839 return 0
1851 return 0
1840
1852
1841 held += report(repo.svfs, b"lock", repo.lock)
1853 held += report(repo.svfs, b"lock", repo.lock)
1842 held += report(repo.vfs, b"wlock", repo.wlock)
1854 held += report(repo.vfs, b"wlock", repo.wlock)
1843
1855
1844 return held
1856 return held
1845
1857
1846
1858
1847 @command(
1859 @command(
1848 b'debugmanifestfulltextcache',
1860 b'debugmanifestfulltextcache',
1849 [
1861 [
1850 (b'', b'clear', False, _(b'clear the cache')),
1862 (b'', b'clear', False, _(b'clear the cache')),
1851 (
1863 (
1852 b'a',
1864 b'a',
1853 b'add',
1865 b'add',
1854 [],
1866 [],
1855 _(b'add the given manifest nodes to the cache'),
1867 _(b'add the given manifest nodes to the cache'),
1856 _(b'NODE'),
1868 _(b'NODE'),
1857 ),
1869 ),
1858 ],
1870 ],
1859 b'',
1871 b'',
1860 )
1872 )
1861 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1873 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1862 """show, clear or amend the contents of the manifest fulltext cache"""
1874 """show, clear or amend the contents of the manifest fulltext cache"""
1863
1875
1864 def getcache():
1876 def getcache():
1865 r = repo.manifestlog.getstorage(b'')
1877 r = repo.manifestlog.getstorage(b'')
1866 try:
1878 try:
1867 return r._fulltextcache
1879 return r._fulltextcache
1868 except AttributeError:
1880 except AttributeError:
1869 msg = _(
1881 msg = _(
1870 b"Current revlog implementation doesn't appear to have a "
1882 b"Current revlog implementation doesn't appear to have a "
1871 b"manifest fulltext cache\n"
1883 b"manifest fulltext cache\n"
1872 )
1884 )
1873 raise error.Abort(msg)
1885 raise error.Abort(msg)
1874
1886
1875 if opts.get('clear'):
1887 if opts.get('clear'):
1876 with repo.wlock():
1888 with repo.wlock():
1877 cache = getcache()
1889 cache = getcache()
1878 cache.clear(clear_persisted_data=True)
1890 cache.clear(clear_persisted_data=True)
1879 return
1891 return
1880
1892
1881 if add:
1893 if add:
1882 with repo.wlock():
1894 with repo.wlock():
1883 m = repo.manifestlog
1895 m = repo.manifestlog
1884 store = m.getstorage(b'')
1896 store = m.getstorage(b'')
1885 for n in add:
1897 for n in add:
1886 try:
1898 try:
1887 manifest = m[store.lookup(n)]
1899 manifest = m[store.lookup(n)]
1888 except error.LookupError as e:
1900 except error.LookupError as e:
1889 raise error.Abort(e, hint=b"Check your manifest node id")
1901 raise error.Abort(e, hint=b"Check your manifest node id")
1890 manifest.read() # stores revision in cache too
1902 manifest.read() # stores revision in cache too
1891 return
1903 return
1892
1904
1893 cache = getcache()
1905 cache = getcache()
1894 if not len(cache):
1906 if not len(cache):
1895 ui.write(_(b'cache empty\n'))
1907 ui.write(_(b'cache empty\n'))
1896 else:
1908 else:
1897 ui.write(
1909 ui.write(
1898 _(
1910 _(
1899 b'cache contains %d manifest entries, in order of most to '
1911 b'cache contains %d manifest entries, in order of most to '
1900 b'least recent:\n'
1912 b'least recent:\n'
1901 )
1913 )
1902 % (len(cache),)
1914 % (len(cache),)
1903 )
1915 )
1904 totalsize = 0
1916 totalsize = 0
1905 for nodeid in cache:
1917 for nodeid in cache:
1906 # Use cache.peek() so the LRU order is not updated
1918 # Use cache.peek() so the LRU order is not updated
1907 data = cache.peek(nodeid)
1919 data = cache.peek(nodeid)
1908 size = len(data)
1920 size = len(data)
1909 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1921 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1910 ui.write(
1922 ui.write(
1911 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1923 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1912 )
1924 )
1913 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1925 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1914 ui.write(
1926 ui.write(
1915 _(b'total cache data size %s, on-disk %s\n')
1927 _(b'total cache data size %s, on-disk %s\n')
1916 % (util.bytecount(totalsize), util.bytecount(ondisk))
1928 % (util.bytecount(totalsize), util.bytecount(ondisk))
1917 )
1929 )
1918
1930
1919
1931
1920 @command(b'debugmergestate', [], b'')
1932 @command(b'debugmergestate', [], b'')
1921 def debugmergestate(ui, repo, *args):
1933 def debugmergestate(ui, repo, *args):
1922 """print merge state
1934 """print merge state
1923
1935
1924 Use --verbose to print out information about whether v1 or v2 merge state
1936 Use --verbose to print out information about whether v1 or v2 merge state
1925 was chosen."""
1937 was chosen."""
1926
1938
1927 def _hashornull(h):
1939 def _hashornull(h):
1928 if h == nullhex:
1940 if h == nullhex:
1929 return b'null'
1941 return b'null'
1930 else:
1942 else:
1931 return h
1943 return h
1932
1944
1933 def printrecords(version):
1945 def printrecords(version):
1934 ui.writenoi18n(b'* version %d records\n' % version)
1946 ui.writenoi18n(b'* version %d records\n' % version)
1935 if version == 1:
1947 if version == 1:
1936 records = v1records
1948 records = v1records
1937 else:
1949 else:
1938 records = v2records
1950 records = v2records
1939
1951
1940 for rtype, record in records:
1952 for rtype, record in records:
1941 # pretty print some record types
1953 # pretty print some record types
1942 if rtype == b'L':
1954 if rtype == b'L':
1943 ui.writenoi18n(b'local: %s\n' % record)
1955 ui.writenoi18n(b'local: %s\n' % record)
1944 elif rtype == b'O':
1956 elif rtype == b'O':
1945 ui.writenoi18n(b'other: %s\n' % record)
1957 ui.writenoi18n(b'other: %s\n' % record)
1946 elif rtype == b'm':
1958 elif rtype == b'm':
1947 driver, mdstate = record.split(b'\0', 1)
1959 driver, mdstate = record.split(b'\0', 1)
1948 ui.writenoi18n(
1960 ui.writenoi18n(
1949 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1961 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1950 )
1962 )
1951 elif rtype in b'FDC':
1963 elif rtype in b'FDC':
1952 r = record.split(b'\0')
1964 r = record.split(b'\0')
1953 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1965 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1954 if version == 1:
1966 if version == 1:
1955 onode = b'not stored in v1 format'
1967 onode = b'not stored in v1 format'
1956 flags = r[7]
1968 flags = r[7]
1957 else:
1969 else:
1958 onode, flags = r[7:9]
1970 onode, flags = r[7:9]
1959 ui.writenoi18n(
1971 ui.writenoi18n(
1960 b'file: %s (record type "%s", state "%s", hash %s)\n'
1972 b'file: %s (record type "%s", state "%s", hash %s)\n'
1961 % (f, rtype, state, _hashornull(hash))
1973 % (f, rtype, state, _hashornull(hash))
1962 )
1974 )
1963 ui.writenoi18n(
1975 ui.writenoi18n(
1964 b' local path: %s (flags "%s")\n' % (lfile, flags)
1976 b' local path: %s (flags "%s")\n' % (lfile, flags)
1965 )
1977 )
1966 ui.writenoi18n(
1978 ui.writenoi18n(
1967 b' ancestor path: %s (node %s)\n'
1979 b' ancestor path: %s (node %s)\n'
1968 % (afile, _hashornull(anode))
1980 % (afile, _hashornull(anode))
1969 )
1981 )
1970 ui.writenoi18n(
1982 ui.writenoi18n(
1971 b' other path: %s (node %s)\n'
1983 b' other path: %s (node %s)\n'
1972 % (ofile, _hashornull(onode))
1984 % (ofile, _hashornull(onode))
1973 )
1985 )
1974 elif rtype == b'f':
1986 elif rtype == b'f':
1975 filename, rawextras = record.split(b'\0', 1)
1987 filename, rawextras = record.split(b'\0', 1)
1976 extras = rawextras.split(b'\0')
1988 extras = rawextras.split(b'\0')
1977 i = 0
1989 i = 0
1978 extrastrings = []
1990 extrastrings = []
1979 while i < len(extras):
1991 while i < len(extras):
1980 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1992 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1981 i += 2
1993 i += 2
1982
1994
1983 ui.writenoi18n(
1995 ui.writenoi18n(
1984 b'file extras: %s (%s)\n'
1996 b'file extras: %s (%s)\n'
1985 % (filename, b', '.join(extrastrings))
1997 % (filename, b', '.join(extrastrings))
1986 )
1998 )
1987 elif rtype == b'l':
1999 elif rtype == b'l':
1988 labels = record.split(b'\0', 2)
2000 labels = record.split(b'\0', 2)
1989 labels = [l for l in labels if len(l) > 0]
2001 labels = [l for l in labels if len(l) > 0]
1990 ui.writenoi18n(b'labels:\n')
2002 ui.writenoi18n(b'labels:\n')
1991 ui.write((b' local: %s\n' % labels[0]))
2003 ui.write((b' local: %s\n' % labels[0]))
1992 ui.write((b' other: %s\n' % labels[1]))
2004 ui.write((b' other: %s\n' % labels[1]))
1993 if len(labels) > 2:
2005 if len(labels) > 2:
1994 ui.write((b' base: %s\n' % labels[2]))
2006 ui.write((b' base: %s\n' % labels[2]))
1995 else:
2007 else:
1996 ui.writenoi18n(
2008 ui.writenoi18n(
1997 b'unrecognized entry: %s\t%s\n'
2009 b'unrecognized entry: %s\t%s\n'
1998 % (rtype, record.replace(b'\0', b'\t'))
2010 % (rtype, record.replace(b'\0', b'\t'))
1999 )
2011 )
2000
2012
2001 # Avoid mergestate.read() since it may raise an exception for unsupported
2013 # Avoid mergestate.read() since it may raise an exception for unsupported
2002 # merge state records. We shouldn't be doing this, but this is OK since this
2014 # merge state records. We shouldn't be doing this, but this is OK since this
2003 # command is pretty low-level.
2015 # command is pretty low-level.
2004 ms = mergemod.mergestate(repo)
2016 ms = mergemod.mergestate(repo)
2005
2017
2006 # sort so that reasonable information is on top
2018 # sort so that reasonable information is on top
2007 v1records = ms._readrecordsv1()
2019 v1records = ms._readrecordsv1()
2008 v2records = ms._readrecordsv2()
2020 v2records = ms._readrecordsv2()
2009 order = b'LOml'
2021 order = b'LOml'
2010
2022
2011 def key(r):
2023 def key(r):
2012 idx = order.find(r[0])
2024 idx = order.find(r[0])
2013 if idx == -1:
2025 if idx == -1:
2014 return (1, r[1])
2026 return (1, r[1])
2015 else:
2027 else:
2016 return (0, idx)
2028 return (0, idx)
2017
2029
2018 v1records.sort(key=key)
2030 v1records.sort(key=key)
2019 v2records.sort(key=key)
2031 v2records.sort(key=key)
2020
2032
2021 if not v1records and not v2records:
2033 if not v1records and not v2records:
2022 ui.writenoi18n(b'no merge state found\n')
2034 ui.writenoi18n(b'no merge state found\n')
2023 elif not v2records:
2035 elif not v2records:
2024 ui.notenoi18n(b'no version 2 merge state\n')
2036 ui.notenoi18n(b'no version 2 merge state\n')
2025 printrecords(1)
2037 printrecords(1)
2026 elif ms._v1v2match(v1records, v2records):
2038 elif ms._v1v2match(v1records, v2records):
2027 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2039 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2028 printrecords(2)
2040 printrecords(2)
2029 else:
2041 else:
2030 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2042 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2031 printrecords(1)
2043 printrecords(1)
2032 if ui.verbose:
2044 if ui.verbose:
2033 printrecords(2)
2045 printrecords(2)
2034
2046
2035
2047
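# A minimal, self-contained sketch of the record ordering used above: known
# record types (b'L', b'O', b'm', b'l') sort first, in that order, and any
# other record falls back to sorting by its payload. The sample records are
# invented for illustration only.
def _recordkey(record, order=b'LOml'):
    idx = order.find(record[0])
    if idx == -1:
        return (1, record[1])  # unknown type: sort last, by payload
    return (0, idx)            # known type: sort by position in `order`

_sample = [(b'f', b'file\0extras'), (b'O', b'other-node'), (b'L', b'local-node')]
_sample.sort(key=_recordkey)
# -> the b'L' record first, then b'O', then the unrecognized b'f' record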
2036 @command(b'debugnamecomplete', [], _(b'NAME...'))
2048 @command(b'debugnamecomplete', [], _(b'NAME...'))
2037 def debugnamecomplete(ui, repo, *args):
2049 def debugnamecomplete(ui, repo, *args):
2038 '''complete "names" - tags, open branch names, bookmark names'''
2050 '''complete "names" - tags, open branch names, bookmark names'''
2039
2051
2040 names = set()
2052 names = set()
2041 # since we previously only listed open branches, we will handle that
2053 # since we previously only listed open branches, we will handle that
2042 # specially (after this for loop)
2054 # specially (after this for loop)
2043 for name, ns in pycompat.iteritems(repo.names):
2055 for name, ns in pycompat.iteritems(repo.names):
2044 if name != b'branches':
2056 if name != b'branches':
2045 names.update(ns.listnames(repo))
2057 names.update(ns.listnames(repo))
2046 names.update(
2058 names.update(
2047 tag
2059 tag
2048 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2060 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2049 if not closed
2061 if not closed
2050 )
2062 )
2051 completions = set()
2063 completions = set()
2052 if not args:
2064 if not args:
2053 args = [b'']
2065 args = [b'']
2054 for a in args:
2066 for a in args:
2055 completions.update(n for n in names if n.startswith(a))
2067 completions.update(n for n in names if n.startswith(a))
2056 ui.write(b'\n'.join(sorted(completions)))
2068 ui.write(b'\n'.join(sorted(completions)))
2057 ui.write(b'\n')
2069 ui.write(b'\n')
2058
2070
2059
2071
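# A tiny stand-alone model of the prefix matching performed above, using
# made-up names purely for illustration:
def _completenames(names, args):
    completions = set()
    for a in args or [b'']:
        completions.update(n for n in names if n.startswith(a))
    return sorted(completions)

# _completenames({b'default', b'stable', b'tip'}, [b'de']) -> [b'default']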
2060 @command(
2072 @command(
2061 b'debugobsolete',
2073 b'debugobsolete',
2062 [
2074 [
2063 (b'', b'flags', 0, _(b'markers flag')),
2075 (b'', b'flags', 0, _(b'markers flag')),
2064 (
2076 (
2065 b'',
2077 b'',
2066 b'record-parents',
2078 b'record-parents',
2067 False,
2079 False,
2068 _(b'record parent information for the precursor'),
2080 _(b'record parent information for the precursor'),
2069 ),
2081 ),
2070 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2082 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2071 (
2083 (
2072 b'',
2084 b'',
2073 b'exclusive',
2085 b'exclusive',
2074 False,
2086 False,
2075 _(b'restrict display to markers only relevant to REV'),
2087 _(b'restrict display to markers only relevant to REV'),
2076 ),
2088 ),
2077 (b'', b'index', False, _(b'display index of the marker')),
2089 (b'', b'index', False, _(b'display index of the marker')),
2078 (b'', b'delete', [], _(b'delete markers specified by indices')),
2090 (b'', b'delete', [], _(b'delete markers specified by indices')),
2079 ]
2091 ]
2080 + cmdutil.commitopts2
2092 + cmdutil.commitopts2
2081 + cmdutil.formatteropts,
2093 + cmdutil.formatteropts,
2082 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2094 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2083 )
2095 )
2084 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2096 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2085 """create arbitrary obsolete marker
2097 """create arbitrary obsolete marker
2086
2098
2087 With no arguments, displays the list of obsolescence markers."""
2099 With no arguments, displays the list of obsolescence markers."""
2088
2100
2089 opts = pycompat.byteskwargs(opts)
2101 opts = pycompat.byteskwargs(opts)
2090
2102
2091 def parsenodeid(s):
2103 def parsenodeid(s):
2092 try:
2104 try:
2093 # We do not use revsingle/revrange functions here to accept
2105 # We do not use revsingle/revrange functions here to accept
2094 # arbitrary node identifiers, possibly not present in the
2106 # arbitrary node identifiers, possibly not present in the
2095 # local repository.
2107 # local repository.
2096 n = bin(s)
2108 n = bin(s)
2097 if len(n) != len(nullid):
2109 if len(n) != len(nullid):
2098 raise TypeError()
2110 raise TypeError()
2099 return n
2111 return n
2100 except TypeError:
2112 except TypeError:
2101 raise error.Abort(
2113 raise error.Abort(
2102 b'changeset references must be full hexadecimal '
2114 b'changeset references must be full hexadecimal '
2103 b'node identifiers'
2115 b'node identifiers'
2104 )
2116 )
2105
2117
2106 if opts.get(b'delete'):
2118 if opts.get(b'delete'):
2107 indices = []
2119 indices = []
2108 for v in opts.get(b'delete'):
2120 for v in opts.get(b'delete'):
2109 try:
2121 try:
2110 indices.append(int(v))
2122 indices.append(int(v))
2111 except ValueError:
2123 except ValueError:
2112 raise error.Abort(
2124 raise error.Abort(
2113 _(b'invalid index value: %r') % v,
2125 _(b'invalid index value: %r') % v,
2114 hint=_(b'use integers for indices'),
2126 hint=_(b'use integers for indices'),
2115 )
2127 )
2116
2128
2117 if repo.currenttransaction():
2129 if repo.currenttransaction():
2118 raise error.Abort(
2130 raise error.Abort(
2119 _(b'cannot delete obsmarkers in the middle of a transaction.')
2131 _(b'cannot delete obsmarkers in the middle of a transaction.')
2120 )
2132 )
2121
2133
2122 with repo.lock():
2134 with repo.lock():
2123 n = repair.deleteobsmarkers(repo.obsstore, indices)
2135 n = repair.deleteobsmarkers(repo.obsstore, indices)
2124 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2136 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2125
2137
2126 return
2138 return
2127
2139
2128 if precursor is not None:
2140 if precursor is not None:
2129 if opts[b'rev']:
2141 if opts[b'rev']:
2130 raise error.Abort(b'cannot select revision when creating marker')
2142 raise error.Abort(b'cannot select revision when creating marker')
2131 metadata = {}
2143 metadata = {}
2132 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2144 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2133 succs = tuple(parsenodeid(succ) for succ in successors)
2145 succs = tuple(parsenodeid(succ) for succ in successors)
2134 l = repo.lock()
2146 l = repo.lock()
2135 try:
2147 try:
2136 tr = repo.transaction(b'debugobsolete')
2148 tr = repo.transaction(b'debugobsolete')
2137 try:
2149 try:
2138 date = opts.get(b'date')
2150 date = opts.get(b'date')
2139 if date:
2151 if date:
2140 date = dateutil.parsedate(date)
2152 date = dateutil.parsedate(date)
2141 else:
2153 else:
2142 date = None
2154 date = None
2143 prec = parsenodeid(precursor)
2155 prec = parsenodeid(precursor)
2144 parents = None
2156 parents = None
2145 if opts[b'record_parents']:
2157 if opts[b'record_parents']:
2146 if prec not in repo.unfiltered():
2158 if prec not in repo.unfiltered():
2147 raise error.Abort(
2159 raise error.Abort(
2148 b'cannot use --record-parents on '
2160 b'cannot use --record-parents on '
2149 b'unknown changesets'
2161 b'unknown changesets'
2150 )
2162 )
2151 parents = repo.unfiltered()[prec].parents()
2163 parents = repo.unfiltered()[prec].parents()
2152 parents = tuple(p.node() for p in parents)
2164 parents = tuple(p.node() for p in parents)
2153 repo.obsstore.create(
2165 repo.obsstore.create(
2154 tr,
2166 tr,
2155 prec,
2167 prec,
2156 succs,
2168 succs,
2157 opts[b'flags'],
2169 opts[b'flags'],
2158 parents=parents,
2170 parents=parents,
2159 date=date,
2171 date=date,
2160 metadata=metadata,
2172 metadata=metadata,
2161 ui=ui,
2173 ui=ui,
2162 )
2174 )
2163 tr.close()
2175 tr.close()
2164 except ValueError as exc:
2176 except ValueError as exc:
2165 raise error.Abort(
2177 raise error.Abort(
2166 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2178 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2167 )
2179 )
2168 finally:
2180 finally:
2169 tr.release()
2181 tr.release()
2170 finally:
2182 finally:
2171 l.release()
2183 l.release()
2172 else:
2184 else:
2173 if opts[b'rev']:
2185 if opts[b'rev']:
2174 revs = scmutil.revrange(repo, opts[b'rev'])
2186 revs = scmutil.revrange(repo, opts[b'rev'])
2175 nodes = [repo[r].node() for r in revs]
2187 nodes = [repo[r].node() for r in revs]
2176 markers = list(
2188 markers = list(
2177 obsutil.getmarkers(
2189 obsutil.getmarkers(
2178 repo, nodes=nodes, exclusive=opts[b'exclusive']
2190 repo, nodes=nodes, exclusive=opts[b'exclusive']
2179 )
2191 )
2180 )
2192 )
2181 markers.sort(key=lambda x: x._data)
2193 markers.sort(key=lambda x: x._data)
2182 else:
2194 else:
2183 markers = obsutil.getmarkers(repo)
2195 markers = obsutil.getmarkers(repo)
2184
2196
2185 markerstoiter = markers
2197 markerstoiter = markers
2186 isrelevant = lambda m: True
2198 isrelevant = lambda m: True
2187 if opts.get(b'rev') and opts.get(b'index'):
2199 if opts.get(b'rev') and opts.get(b'index'):
2188 markerstoiter = obsutil.getmarkers(repo)
2200 markerstoiter = obsutil.getmarkers(repo)
2189 markerset = set(markers)
2201 markerset = set(markers)
2190 isrelevant = lambda m: m in markerset
2202 isrelevant = lambda m: m in markerset
2191
2203
2192 fm = ui.formatter(b'debugobsolete', opts)
2204 fm = ui.formatter(b'debugobsolete', opts)
2193 for i, m in enumerate(markerstoiter):
2205 for i, m in enumerate(markerstoiter):
2194 if not isrelevant(m):
2206 if not isrelevant(m):
2195 # marker can be irrelevant when we're iterating over a set
2207 # marker can be irrelevant when we're iterating over a set
2196 # of markers (markerstoiter) which is bigger than the set
2208 # of markers (markerstoiter) which is bigger than the set
2197 # of markers we want to display (markers).
2209 # of markers we want to display (markers).
2198 # This can happen if both --index and --rev options are
2210 # This can happen if both --index and --rev options are
2199 # provided and thus we need to iterate over all of the markers
2211 # provided and thus we need to iterate over all of the markers
2200 # to get the correct indices, but only display the ones that
2212 # to get the correct indices, but only display the ones that
2201 # are relevant to the --rev value.
2213 # are relevant to the --rev value.
2202 continue
2214 continue
2203 fm.startitem()
2215 fm.startitem()
2204 ind = i if opts.get(b'index') else None
2216 ind = i if opts.get(b'index') else None
2205 cmdutil.showmarker(fm, m, index=ind)
2217 cmdutil.showmarker(fm, m, index=ind)
2206 fm.end()
2218 fm.end()
2207
2219
2208
2220
2209 @command(
2221 @command(
2210 b'debugp1copies',
2222 b'debugp1copies',
2211 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2223 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2212 _(b'[-r REV]'),
2224 _(b'[-r REV]'),
2213 )
2225 )
2214 def debugp1copies(ui, repo, **opts):
2226 def debugp1copies(ui, repo, **opts):
2215 """dump copy information compared to p1"""
2227 """dump copy information compared to p1"""
2216
2228
2217 opts = pycompat.byteskwargs(opts)
2229 opts = pycompat.byteskwargs(opts)
2218 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2230 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2219 for dst, src in ctx.p1copies().items():
2231 for dst, src in ctx.p1copies().items():
2220 ui.write(b'%s -> %s\n' % (src, dst))
2232 ui.write(b'%s -> %s\n' % (src, dst))
2221
2233
2222
2234
2223 @command(
2235 @command(
2224 b'debugp2copies',
2236 b'debugp2copies',
2225 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2237 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2226 _(b'[-r REV]'),
2238 _(b'[-r REV]'),
2227 )
2239 )
2228 def debugp2copies(ui, repo, **opts):
2240 def debugp2copies(ui, repo, **opts):
2229 """dump copy information compared to p2"""
2241 """dump copy information compared to p2"""
2230
2242
2231 opts = pycompat.byteskwargs(opts)
2243 opts = pycompat.byteskwargs(opts)
2232 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2244 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2233 for dst, src in ctx.p2copies().items():
2245 for dst, src in ctx.p2copies().items():
2234 ui.write(b'%s -> %s\n' % (src, dst))
2246 ui.write(b'%s -> %s\n' % (src, dst))
2235
2247
2236
2248
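# Both copy-tracing commands above print one "source -> destination" line per
# copy recorded against the respective parent; the file names here are
# invented for illustration:
#
#   $ hg debugp1copies -r .
#   old-name.txt -> new-name.txt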
2237 @command(
2249 @command(
2238 b'debugpathcomplete',
2250 b'debugpathcomplete',
2239 [
2251 [
2240 (b'f', b'full', None, _(b'complete an entire path')),
2252 (b'f', b'full', None, _(b'complete an entire path')),
2241 (b'n', b'normal', None, _(b'show only normal files')),
2253 (b'n', b'normal', None, _(b'show only normal files')),
2242 (b'a', b'added', None, _(b'show only added files')),
2254 (b'a', b'added', None, _(b'show only added files')),
2243 (b'r', b'removed', None, _(b'show only removed files')),
2255 (b'r', b'removed', None, _(b'show only removed files')),
2244 ],
2256 ],
2245 _(b'FILESPEC...'),
2257 _(b'FILESPEC...'),
2246 )
2258 )
2247 def debugpathcomplete(ui, repo, *specs, **opts):
2259 def debugpathcomplete(ui, repo, *specs, **opts):
2248 '''complete part or all of a tracked path
2260 '''complete part or all of a tracked path
2249
2261
2250 This command supports shells that offer path name completion. It
2262 This command supports shells that offer path name completion. It
2251 currently completes only files already known to the dirstate.
2263 currently completes only files already known to the dirstate.
2252
2264
2253 Completion extends only to the next path segment unless
2265 Completion extends only to the next path segment unless
2254 --full is specified, in which case entire paths are used.'''
2266 --full is specified, in which case entire paths are used.'''
2255
2267
2256 def complete(path, acceptable):
2268 def complete(path, acceptable):
2257 dirstate = repo.dirstate
2269 dirstate = repo.dirstate
2258 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2270 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2259 rootdir = repo.root + pycompat.ossep
2271 rootdir = repo.root + pycompat.ossep
2260 if spec != repo.root and not spec.startswith(rootdir):
2272 if spec != repo.root and not spec.startswith(rootdir):
2261 return [], []
2273 return [], []
2262 if os.path.isdir(spec):
2274 if os.path.isdir(spec):
2263 spec += b'/'
2275 spec += b'/'
2264 spec = spec[len(rootdir) :]
2276 spec = spec[len(rootdir) :]
2265 fixpaths = pycompat.ossep != b'/'
2277 fixpaths = pycompat.ossep != b'/'
2266 if fixpaths:
2278 if fixpaths:
2267 spec = spec.replace(pycompat.ossep, b'/')
2279 spec = spec.replace(pycompat.ossep, b'/')
2268 speclen = len(spec)
2280 speclen = len(spec)
2269 fullpaths = opts['full']
2281 fullpaths = opts['full']
2270 files, dirs = set(), set()
2282 files, dirs = set(), set()
2271 adddir, addfile = dirs.add, files.add
2283 adddir, addfile = dirs.add, files.add
2272 for f, st in pycompat.iteritems(dirstate):
2284 for f, st in pycompat.iteritems(dirstate):
2273 if f.startswith(spec) and st[0] in acceptable:
2285 if f.startswith(spec) and st[0] in acceptable:
2274 if fixpaths:
2286 if fixpaths:
2275 f = f.replace(b'/', pycompat.ossep)
2287 f = f.replace(b'/', pycompat.ossep)
2276 if fullpaths:
2288 if fullpaths:
2277 addfile(f)
2289 addfile(f)
2278 continue
2290 continue
2279 s = f.find(pycompat.ossep, speclen)
2291 s = f.find(pycompat.ossep, speclen)
2280 if s >= 0:
2292 if s >= 0:
2281 adddir(f[:s])
2293 adddir(f[:s])
2282 else:
2294 else:
2283 addfile(f)
2295 addfile(f)
2284 return files, dirs
2296 return files, dirs
2285
2297
2286 acceptable = b''
2298 acceptable = b''
2287 if opts['normal']:
2299 if opts['normal']:
2288 acceptable += b'nm'
2300 acceptable += b'nm'
2289 if opts['added']:
2301 if opts['added']:
2290 acceptable += b'a'
2302 acceptable += b'a'
2291 if opts['removed']:
2303 if opts['removed']:
2292 acceptable += b'r'
2304 acceptable += b'r'
2293 cwd = repo.getcwd()
2305 cwd = repo.getcwd()
2294 if not specs:
2306 if not specs:
2295 specs = [b'.']
2307 specs = [b'.']
2296
2308
2297 files, dirs = set(), set()
2309 files, dirs = set(), set()
2298 for spec in specs:
2310 for spec in specs:
2299 f, d = complete(spec, acceptable or b'nmar')
2311 f, d = complete(spec, acceptable or b'nmar')
2300 files.update(f)
2312 files.update(f)
2301 dirs.update(d)
2313 dirs.update(d)
2302 files.update(dirs)
2314 files.update(dirs)
2303 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2315 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2304 ui.write(b'\n')
2316 ui.write(b'\n')
2305
2317
2306
2318
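# A stripped-down model of the completion logic above: unless --full is
# given, a match is cut back to the next path separator so only one more
# segment is offered at a time. The sample paths are made up.
def _nextsegment(paths, spec, full=False, sep=b'/'):
    results = set()
    for f in paths:
        if not f.startswith(spec):
            continue
        if full:
            results.add(f)
            continue
        s = f.find(sep, len(spec))
        results.add(f[:s] if s >= 0 else f)
    return results

# _nextsegment({b'src/a.py', b'src/sub/b.py'}, b'src/')
# -> {b'src/a.py', b'src/sub'}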
2307 @command(
2319 @command(
2308 b'debugpathcopies',
2320 b'debugpathcopies',
2309 cmdutil.walkopts,
2321 cmdutil.walkopts,
2310 b'hg debugpathcopies REV1 REV2 [FILE]',
2322 b'hg debugpathcopies REV1 REV2 [FILE]',
2311 inferrepo=True,
2323 inferrepo=True,
2312 )
2324 )
2313 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2325 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2314 """show copies between two revisions"""
2326 """show copies between two revisions"""
2315 ctx1 = scmutil.revsingle(repo, rev1)
2327 ctx1 = scmutil.revsingle(repo, rev1)
2316 ctx2 = scmutil.revsingle(repo, rev2)
2328 ctx2 = scmutil.revsingle(repo, rev2)
2317 m = scmutil.match(ctx1, pats, opts)
2329 m = scmutil.match(ctx1, pats, opts)
2318 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2330 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2319 ui.write(b'%s -> %s\n' % (src, dst))
2331 ui.write(b'%s -> %s\n' % (src, dst))
2320
2332
2321
2333
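# Call shape matching the synopsis registered above; the revisions and the
# resulting file names are placeholders:
#
#   $ hg debugpathcopies REV1 REV2
#   copied-from.py -> copied-to.py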
2322 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2334 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2323 def debugpeer(ui, path):
2335 def debugpeer(ui, path):
2324 """establish a connection to a peer repository"""
2336 """establish a connection to a peer repository"""
2325 # Always enable peer request logging. Requires --debug to display
2337 # Always enable peer request logging. Requires --debug to display
2326 # though.
2338 # though.
2327 overrides = {
2339 overrides = {
2328 (b'devel', b'debug.peer-request'): True,
2340 (b'devel', b'debug.peer-request'): True,
2329 }
2341 }
2330
2342
2331 with ui.configoverride(overrides):
2343 with ui.configoverride(overrides):
2332 peer = hg.peer(ui, {}, path)
2344 peer = hg.peer(ui, {}, path)
2333
2345
2334 local = peer.local() is not None
2346 local = peer.local() is not None
2335 canpush = peer.canpush()
2347 canpush = peer.canpush()
2336
2348
2337 ui.write(_(b'url: %s\n') % peer.url())
2349 ui.write(_(b'url: %s\n') % peer.url())
2338 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2350 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2339 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2351 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2340
2352
2341
2353
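# The override above corresponds to the following hgrc setting (shown as a
# hedged example; combine it with --debug so the request log is displayed):
#
#   [devel]
#   debug.peer-request = True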
2342 @command(
2354 @command(
2343 b'debugpickmergetool',
2355 b'debugpickmergetool',
2344 [
2356 [
2345 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2357 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2346 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2358 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2347 ]
2359 ]
2348 + cmdutil.walkopts
2360 + cmdutil.walkopts
2349 + cmdutil.mergetoolopts,
2361 + cmdutil.mergetoolopts,
2350 _(b'[PATTERN]...'),
2362 _(b'[PATTERN]...'),
2351 inferrepo=True,
2363 inferrepo=True,
2352 )
2364 )
2353 def debugpickmergetool(ui, repo, *pats, **opts):
2365 def debugpickmergetool(ui, repo, *pats, **opts):
2354 """examine which merge tool is chosen for specified file
2366 """examine which merge tool is chosen for specified file
2355
2367
2356 As described in :hg:`help merge-tools`, Mercurial examines
2368 As described in :hg:`help merge-tools`, Mercurial examines
2357 configurations below in this order to decide which merge tool is
2369 configurations below in this order to decide which merge tool is
2358 chosen for the specified file.
2370 chosen for the specified file.
2359
2371
2360 1. ``--tool`` option
2372 1. ``--tool`` option
2361 2. ``HGMERGE`` environment variable
2373 2. ``HGMERGE`` environment variable
2362 3. configurations in ``merge-patterns`` section
2374 3. configurations in ``merge-patterns`` section
2363 4. configuration of ``ui.merge``
2375 4. configuration of ``ui.merge``
2364 5. configurations in ``merge-tools`` section
2376 5. configurations in ``merge-tools`` section
2365 6. ``hgmerge`` tool (for historical reasons only)
2377 6. ``hgmerge`` tool (for historical reasons only)
2366 7. default tool for fallback (``:merge`` or ``:prompt``)
2378 7. default tool for fallback (``:merge`` or ``:prompt``)
2367
2379
2368 This command writes out the examination result in the style below::
2380 This command writes out the examination result in the style below::
2369
2381
2370 FILE = MERGETOOL
2382 FILE = MERGETOOL
2371
2383
2372 By default, all files known in the first parent context of the
2384 By default, all files known in the first parent context of the
2373 working directory are examined. Use file patterns and/or -I/-X
2385 working directory are examined. Use file patterns and/or -I/-X
2374 options to limit target files. -r/--rev is also useful to examine
2386 options to limit target files. -r/--rev is also useful to examine
2375 files in another context without actually updating to it.
2387 files in another context without actually updating to it.
2376
2388
2377 With --debug, this command shows warning messages while matching
2389 With --debug, this command shows warning messages while matching
2378 against ``merge-patterns`` and so on, too. It is recommended to
2390 against ``merge-patterns`` and so on, too. It is recommended to
2379 use this option with explicit file patterns and/or -I/-X options,
2391 use this option with explicit file patterns and/or -I/-X options,
2380 because this option increases the amount of output per file according
2392 because this option increases the amount of output per file according
2381 to configurations in hgrc.
2393 to configurations in hgrc.
2382
2394
2383 With -v/--verbose, this command first shows the configurations
2395 With -v/--verbose, this command first shows the configurations
2384 below (only those that are specified).
2396 below (only those that are specified).
2385
2397
2386 - ``--tool`` option
2398 - ``--tool`` option
2387 - ``HGMERGE`` environment variable
2399 - ``HGMERGE`` environment variable
2388 - configuration of ``ui.merge``
2400 - configuration of ``ui.merge``
2389
2401
2390 If the merge tool is chosen before matching against
2402 If the merge tool is chosen before matching against
2391 ``merge-patterns``, this command can't show any helpful
2403 ``merge-patterns``, this command can't show any helpful
2392 information, even with --debug. In such a case, the information
2404 information, even with --debug. In such a case, the information
2393 above is useful for knowing why a merge tool is chosen.
2405 above is useful for knowing why a merge tool is chosen.
2394 """
2406 """
2395 opts = pycompat.byteskwargs(opts)
2407 opts = pycompat.byteskwargs(opts)
2396 overrides = {}
2408 overrides = {}
2397 if opts[b'tool']:
2409 if opts[b'tool']:
2398 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2410 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2399 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2411 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2400
2412
2401 with ui.configoverride(overrides, b'debugmergepatterns'):
2413 with ui.configoverride(overrides, b'debugmergepatterns'):
2402 hgmerge = encoding.environ.get(b"HGMERGE")
2414 hgmerge = encoding.environ.get(b"HGMERGE")
2403 if hgmerge is not None:
2415 if hgmerge is not None:
2404 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2416 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2405 uimerge = ui.config(b"ui", b"merge")
2417 uimerge = ui.config(b"ui", b"merge")
2406 if uimerge:
2418 if uimerge:
2407 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2419 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2408
2420
2409 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2421 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2410 m = scmutil.match(ctx, pats, opts)
2422 m = scmutil.match(ctx, pats, opts)
2411 changedelete = opts[b'changedelete']
2423 changedelete = opts[b'changedelete']
2412 for path in ctx.walk(m):
2424 for path in ctx.walk(m):
2413 fctx = ctx[path]
2425 fctx = ctx[path]
2414 try:
2426 try:
2415 if not ui.debugflag:
2427 if not ui.debugflag:
2416 ui.pushbuffer(error=True)
2428 ui.pushbuffer(error=True)
2417 tool, toolpath = filemerge._picktool(
2429 tool, toolpath = filemerge._picktool(
2418 repo,
2430 repo,
2419 ui,
2431 ui,
2420 path,
2432 path,
2421 fctx.isbinary(),
2433 fctx.isbinary(),
2422 b'l' in fctx.flags(),
2434 b'l' in fctx.flags(),
2423 changedelete,
2435 changedelete,
2424 )
2436 )
2425 finally:
2437 finally:
2426 if not ui.debugflag:
2438 if not ui.debugflag:
2427 ui.popbuffer()
2439 ui.popbuffer()
2428 ui.write(b'%s = %s\n' % (path, tool))
2440 ui.write(b'%s = %s\n' % (path, tool))
2429
2441
2430
2442
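# Hedged examples for the command above; the file names and chosen tools are
# placeholders, but the "FILE = MERGETOOL" shape is exactly what the loop
# above writes:
#
#   $ hg debugpickmergetool
#   path/to/file.txt = :merge
#   $ hg debugpickmergetool --tool :prompt -r REV 'glob:**.c'
#   some/file.c = :prompt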
2431 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2443 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2432 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2444 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2433 '''access the pushkey key/value protocol
2445 '''access the pushkey key/value protocol
2434
2446
2435 With two args, list the keys in the given namespace.
2447 With two args, list the keys in the given namespace.
2436
2448
2437 With five args, set a key to new if it currently is set to old.
2449 With five args, set a key to new if it currently is set to old.
2438 Reports success or failure.
2450 Reports success or failure.
2439 '''
2451 '''
2440
2452
2441 target = hg.peer(ui, {}, repopath)
2453 target = hg.peer(ui, {}, repopath)
2442 if keyinfo:
2454 if keyinfo:
2443 key, old, new = keyinfo
2455 key, old, new = keyinfo
2444 with target.commandexecutor() as e:
2456 with target.commandexecutor() as e:
2445 r = e.callcommand(
2457 r = e.callcommand(
2446 b'pushkey',
2458 b'pushkey',
2447 {
2459 {
2448 b'namespace': namespace,
2460 b'namespace': namespace,
2449 b'key': key,
2461 b'key': key,
2450 b'old': old,
2462 b'old': old,
2451 b'new': new,
2463 b'new': new,
2452 },
2464 },
2453 ).result()
2465 ).result()
2454
2466
2455 ui.status(pycompat.bytestr(r) + b'\n')
2467 ui.status(pycompat.bytestr(r) + b'\n')
2456 return not r
2468 return not r
2457 else:
2469 else:
2458 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2470 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2459 ui.write(
2471 ui.write(
2460 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2472 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2461 )
2473 )
2462
2474
2463
2475
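# Call shapes for the pushkey access described above (the repository path,
# key and values are placeholders):
#
#   $ hg debugpushkey /path/to/repo namespaces               # list keys in a namespace
#   $ hg debugpushkey /path/to/repo bookmarks BOOK '' NODE   # set only if old == ''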
2464 @command(b'debugpvec', [], _(b'A B'))
2476 @command(b'debugpvec', [], _(b'A B'))
2465 def debugpvec(ui, repo, a, b=None):
2477 def debugpvec(ui, repo, a, b=None):
2466 ca = scmutil.revsingle(repo, a)
2478 ca = scmutil.revsingle(repo, a)
2467 cb = scmutil.revsingle(repo, b)
2479 cb = scmutil.revsingle(repo, b)
2468 pa = pvec.ctxpvec(ca)
2480 pa = pvec.ctxpvec(ca)
2469 pb = pvec.ctxpvec(cb)
2481 pb = pvec.ctxpvec(cb)
2470 if pa == pb:
2482 if pa == pb:
2471 rel = b"="
2483 rel = b"="
2472 elif pa > pb:
2484 elif pa > pb:
2473 rel = b">"
2485 rel = b">"
2474 elif pa < pb:
2486 elif pa < pb:
2475 rel = b"<"
2487 rel = b"<"
2476 elif pa | pb:
2488 elif pa | pb:
2477 rel = b"|"
2489 rel = b"|"
2478 ui.write(_(b"a: %s\n") % pa)
2490 ui.write(_(b"a: %s\n") % pa)
2479 ui.write(_(b"b: %s\n") % pb)
2491 ui.write(_(b"b: %s\n") % pb)
2480 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2492 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2481 ui.write(
2493 ui.write(
2482 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2494 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2483 % (
2495 % (
2484 abs(pa._depth - pb._depth),
2496 abs(pa._depth - pb._depth),
2485 pvec._hamming(pa._vec, pb._vec),
2497 pvec._hamming(pa._vec, pb._vec),
2486 pa.distance(pb),
2498 pa.distance(pb),
2487 rel,
2499 rel,
2488 )
2500 )
2489 )
2501 )
2490
2502
2491
2503
2492 @command(
2504 @command(
2493 b'debugrebuilddirstate|debugrebuildstate',
2505 b'debugrebuilddirstate|debugrebuildstate',
2494 [
2506 [
2495 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2507 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2496 (
2508 (
2497 b'',
2509 b'',
2498 b'minimal',
2510 b'minimal',
2499 None,
2511 None,
2500 _(
2512 _(
2501 b'only rebuild files that are inconsistent with '
2513 b'only rebuild files that are inconsistent with '
2502 b'the working copy parent'
2514 b'the working copy parent'
2503 ),
2515 ),
2504 ),
2516 ),
2505 ],
2517 ],
2506 _(b'[-r REV]'),
2518 _(b'[-r REV]'),
2507 )
2519 )
2508 def debugrebuilddirstate(ui, repo, rev, **opts):
2520 def debugrebuilddirstate(ui, repo, rev, **opts):
2509 """rebuild the dirstate as it would look like for the given revision
2521 """rebuild the dirstate as it would look like for the given revision
2510
2522
2511 If no revision is specified the first current parent will be used.
2523 If no revision is specified the first current parent will be used.
2512
2524
2513 The dirstate will be set to the files of the given revision.
2525 The dirstate will be set to the files of the given revision.
2514 The actual working directory content or existing dirstate
2526 The actual working directory content or existing dirstate
2515 information such as adds or removes is not considered.
2527 information such as adds or removes is not considered.
2516
2528
2517 ``minimal`` will only rebuild the dirstate status for files that claim to be
2529 ``minimal`` will only rebuild the dirstate status for files that claim to be
2518 tracked but are not in the parent manifest, or that exist in the parent
2530 tracked but are not in the parent manifest, or that exist in the parent
2519 manifest but are not in the dirstate. It will not change adds, removes, or
2531 manifest but are not in the dirstate. It will not change adds, removes, or
2520 modified files that are in the working copy parent.
2532 modified files that are in the working copy parent.
2521
2533
2522 One use of this command is to make the next :hg:`status` invocation
2534 One use of this command is to make the next :hg:`status` invocation
2523 check the actual file content.
2535 check the actual file content.
2524 """
2536 """
2525 ctx = scmutil.revsingle(repo, rev)
2537 ctx = scmutil.revsingle(repo, rev)
2526 with repo.wlock():
2538 with repo.wlock():
2527 dirstate = repo.dirstate
2539 dirstate = repo.dirstate
2528 changedfiles = None
2540 changedfiles = None
2529 # See command doc for what minimal does.
2541 # See command doc for what minimal does.
2530 if opts.get('minimal'):
2542 if opts.get('minimal'):
2531 manifestfiles = set(ctx.manifest().keys())
2543 manifestfiles = set(ctx.manifest().keys())
2532 dirstatefiles = set(dirstate)
2544 dirstatefiles = set(dirstate)
2533 manifestonly = manifestfiles - dirstatefiles
2545 manifestonly = manifestfiles - dirstatefiles
2534 dsonly = dirstatefiles - manifestfiles
2546 dsonly = dirstatefiles - manifestfiles
2535 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2547 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2536 changedfiles = manifestonly | dsnotadded
2548 changedfiles = manifestonly | dsnotadded
2537
2549
2538 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2550 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2539
2551
2540
2552
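# A self-contained model of the --minimal selection above, with toy sets
# standing in for the manifest and the dirstate:
_manifestfiles = {b'a.txt', b'b.txt'}
_dirstatefiles = {b'b.txt', b'c.txt', b'd.txt'}
_added = {b'd.txt'}  # stands in for dirstate entries in state 'a'
_manifestonly = _manifestfiles - _dirstatefiles                  # {b'a.txt'}
_dsnotadded = {f for f in _dirstatefiles - _manifestfiles if f not in _added}
_changedfiles = _manifestonly | _dsnotadded                      # {b'a.txt', b'c.txt'}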
2541 @command(b'debugrebuildfncache', [], b'')
2553 @command(b'debugrebuildfncache', [], b'')
2542 def debugrebuildfncache(ui, repo):
2554 def debugrebuildfncache(ui, repo):
2543 """rebuild the fncache file"""
2555 """rebuild the fncache file"""
2544 repair.rebuildfncache(ui, repo)
2556 repair.rebuildfncache(ui, repo)
2545
2557
2546
2558
2547 @command(
2559 @command(
2548 b'debugrename',
2560 b'debugrename',
2549 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2561 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2550 _(b'[-r REV] [FILE]...'),
2562 _(b'[-r REV] [FILE]...'),
2551 )
2563 )
2552 def debugrename(ui, repo, *pats, **opts):
2564 def debugrename(ui, repo, *pats, **opts):
2553 """dump rename information"""
2565 """dump rename information"""
2554
2566
2555 opts = pycompat.byteskwargs(opts)
2567 opts = pycompat.byteskwargs(opts)
2556 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2568 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2557 m = scmutil.match(ctx, pats, opts)
2569 m = scmutil.match(ctx, pats, opts)
2558 for abs in ctx.walk(m):
2570 for abs in ctx.walk(m):
2559 fctx = ctx[abs]
2571 fctx = ctx[abs]
2560 o = fctx.filelog().renamed(fctx.filenode())
2572 o = fctx.filelog().renamed(fctx.filenode())
2561 rel = repo.pathto(abs)
2573 rel = repo.pathto(abs)
2562 if o:
2574 if o:
2563 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2575 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2564 else:
2576 else:
2565 ui.write(_(b"%s not renamed\n") % rel)
2577 ui.write(_(b"%s not renamed\n") % rel)
2566
2578
2567
2579
2568 @command(
2580 @command(
2569 b'debugrevlog',
2581 b'debugrevlog',
2570 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2582 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2571 _(b'-c|-m|FILE'),
2583 _(b'-c|-m|FILE'),
2572 optionalrepo=True,
2584 optionalrepo=True,
2573 )
2585 )
2574 def debugrevlog(ui, repo, file_=None, **opts):
2586 def debugrevlog(ui, repo, file_=None, **opts):
2575 """show data and statistics about a revlog"""
2587 """show data and statistics about a revlog"""
2576 opts = pycompat.byteskwargs(opts)
2588 opts = pycompat.byteskwargs(opts)
2577 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2589 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2578
2590
2579 if opts.get(b"dump"):
2591 if opts.get(b"dump"):
2580 numrevs = len(r)
2592 numrevs = len(r)
2581 ui.write(
2593 ui.write(
2582 (
2594 (
2583 b"# rev p1rev p2rev start end deltastart base p1 p2"
2595 b"# rev p1rev p2rev start end deltastart base p1 p2"
2584 b" rawsize totalsize compression heads chainlen\n"
2596 b" rawsize totalsize compression heads chainlen\n"
2585 )
2597 )
2586 )
2598 )
2587 ts = 0
2599 ts = 0
2588 heads = set()
2600 heads = set()
2589
2601
2590 for rev in pycompat.xrange(numrevs):
2602 for rev in pycompat.xrange(numrevs):
2591 dbase = r.deltaparent(rev)
2603 dbase = r.deltaparent(rev)
2592 if dbase == -1:
2604 if dbase == -1:
2593 dbase = rev
2605 dbase = rev
2594 cbase = r.chainbase(rev)
2606 cbase = r.chainbase(rev)
2595 clen = r.chainlen(rev)
2607 clen = r.chainlen(rev)
2596 p1, p2 = r.parentrevs(rev)
2608 p1, p2 = r.parentrevs(rev)
2597 rs = r.rawsize(rev)
2609 rs = r.rawsize(rev)
2598 ts = ts + rs
2610 ts = ts + rs
2599 heads -= set(r.parentrevs(rev))
2611 heads -= set(r.parentrevs(rev))
2600 heads.add(rev)
2612 heads.add(rev)
2601 try:
2613 try:
2602 compression = ts / r.end(rev)
2614 compression = ts / r.end(rev)
2603 except ZeroDivisionError:
2615 except ZeroDivisionError:
2604 compression = 0
2616 compression = 0
2605 ui.write(
2617 ui.write(
2606 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2618 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2607 b"%11d %5d %8d\n"
2619 b"%11d %5d %8d\n"
2608 % (
2620 % (
2609 rev,
2621 rev,
2610 p1,
2622 p1,
2611 p2,
2623 p2,
2612 r.start(rev),
2624 r.start(rev),
2613 r.end(rev),
2625 r.end(rev),
2614 r.start(dbase),
2626 r.start(dbase),
2615 r.start(cbase),
2627 r.start(cbase),
2616 r.start(p1),
2628 r.start(p1),
2617 r.start(p2),
2629 r.start(p2),
2618 rs,
2630 rs,
2619 ts,
2631 ts,
2620 compression,
2632 compression,
2621 len(heads),
2633 len(heads),
2622 clen,
2634 clen,
2623 )
2635 )
2624 )
2636 )
2625 return 0
2637 return 0
2626
2638
2627 v = r.version
2639 v = r.version
2628 format = v & 0xFFFF
2640 format = v & 0xFFFF
2629 flags = []
2641 flags = []
2630 gdelta = False
2642 gdelta = False
2631 if v & revlog.FLAG_INLINE_DATA:
2643 if v & revlog.FLAG_INLINE_DATA:
2632 flags.append(b'inline')
2644 flags.append(b'inline')
2633 if v & revlog.FLAG_GENERALDELTA:
2645 if v & revlog.FLAG_GENERALDELTA:
2634 gdelta = True
2646 gdelta = True
2635 flags.append(b'generaldelta')
2647 flags.append(b'generaldelta')
2636 if not flags:
2648 if not flags:
2637 flags = [b'(none)']
2649 flags = [b'(none)']
2638
2650
2639 ### tracks merge vs single parent
2651 ### tracks merge vs single parent
2640 nummerges = 0
2652 nummerges = 0
2641
2653
2642 ### tracks the ways the "delta" is built
2654 ### tracks the ways the "delta" is built
2643 # nodelta
2655 # nodelta
2644 numempty = 0
2656 numempty = 0
2645 numemptytext = 0
2657 numemptytext = 0
2646 numemptydelta = 0
2658 numemptydelta = 0
2647 # full file content
2659 # full file content
2648 numfull = 0
2660 numfull = 0
2649 # intermediate snapshot against a prior snapshot
2661 # intermediate snapshot against a prior snapshot
2650 numsemi = 0
2662 numsemi = 0
2651 # snapshot count per depth
2663 # snapshot count per depth
2652 numsnapdepth = collections.defaultdict(lambda: 0)
2664 numsnapdepth = collections.defaultdict(lambda: 0)
2653 # delta against previous revision
2665 # delta against previous revision
2654 numprev = 0
2666 numprev = 0
2655 # delta against first or second parent (not prev)
2667 # delta against first or second parent (not prev)
2656 nump1 = 0
2668 nump1 = 0
2657 nump2 = 0
2669 nump2 = 0
2658 # delta against neither prev nor parents
2670 # delta against neither prev nor parents
2659 numother = 0
2671 numother = 0
2660 # delta against prev that are also first or second parent
2672 # delta against prev that are also first or second parent
2661 # (details of `numprev`)
2673 # (details of `numprev`)
2662 nump1prev = 0
2674 nump1prev = 0
2663 nump2prev = 0
2675 nump2prev = 0
2664
2676
2665 # data about delta chain of each revs
2677 # data about delta chain of each revs
2666 chainlengths = []
2678 chainlengths = []
2667 chainbases = []
2679 chainbases = []
2668 chainspans = []
2680 chainspans = []
2669
2681
2670 # data about each revision
2682 # data about each revision
2671 datasize = [None, 0, 0]
2683 datasize = [None, 0, 0]
2672 fullsize = [None, 0, 0]
2684 fullsize = [None, 0, 0]
2673 semisize = [None, 0, 0]
2685 semisize = [None, 0, 0]
2674 # snapshot count per depth
2686 # snapshot count per depth
2675 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2687 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2676 deltasize = [None, 0, 0]
2688 deltasize = [None, 0, 0]
2677 chunktypecounts = {}
2689 chunktypecounts = {}
2678 chunktypesizes = {}
2690 chunktypesizes = {}
2679
2691
2680 def addsize(size, l):
2692 def addsize(size, l):
2681 if l[0] is None or size < l[0]:
2693 if l[0] is None or size < l[0]:
2682 l[0] = size
2694 l[0] = size
2683 if size > l[1]:
2695 if size > l[1]:
2684 l[1] = size
2696 l[1] = size
2685 l[2] += size
2697 l[2] += size
2686
2698
2687 numrevs = len(r)
2699 numrevs = len(r)
2688 for rev in pycompat.xrange(numrevs):
2700 for rev in pycompat.xrange(numrevs):
2689 p1, p2 = r.parentrevs(rev)
2701 p1, p2 = r.parentrevs(rev)
2690 delta = r.deltaparent(rev)
2702 delta = r.deltaparent(rev)
2691 if format > 0:
2703 if format > 0:
2692 addsize(r.rawsize(rev), datasize)
2704 addsize(r.rawsize(rev), datasize)
2693 if p2 != nullrev:
2705 if p2 != nullrev:
2694 nummerges += 1
2706 nummerges += 1
2695 size = r.length(rev)
2707 size = r.length(rev)
2696 if delta == nullrev:
2708 if delta == nullrev:
2697 chainlengths.append(0)
2709 chainlengths.append(0)
2698 chainbases.append(r.start(rev))
2710 chainbases.append(r.start(rev))
2699 chainspans.append(size)
2711 chainspans.append(size)
2700 if size == 0:
2712 if size == 0:
2701 numempty += 1
2713 numempty += 1
2702 numemptytext += 1
2714 numemptytext += 1
2703 else:
2715 else:
2704 numfull += 1
2716 numfull += 1
2705 numsnapdepth[0] += 1
2717 numsnapdepth[0] += 1
2706 addsize(size, fullsize)
2718 addsize(size, fullsize)
2707 addsize(size, snapsizedepth[0])
2719 addsize(size, snapsizedepth[0])
2708 else:
2720 else:
2709 chainlengths.append(chainlengths[delta] + 1)
2721 chainlengths.append(chainlengths[delta] + 1)
2710 baseaddr = chainbases[delta]
2722 baseaddr = chainbases[delta]
2711 revaddr = r.start(rev)
2723 revaddr = r.start(rev)
2712 chainbases.append(baseaddr)
2724 chainbases.append(baseaddr)
2713 chainspans.append((revaddr - baseaddr) + size)
2725 chainspans.append((revaddr - baseaddr) + size)
2714 if size == 0:
2726 if size == 0:
2715 numempty += 1
2727 numempty += 1
2716 numemptydelta += 1
2728 numemptydelta += 1
2717 elif r.issnapshot(rev):
2729 elif r.issnapshot(rev):
2718 addsize(size, semisize)
2730 addsize(size, semisize)
2719 numsemi += 1
2731 numsemi += 1
2720 depth = r.snapshotdepth(rev)
2732 depth = r.snapshotdepth(rev)
2721 numsnapdepth[depth] += 1
2733 numsnapdepth[depth] += 1
2722 addsize(size, snapsizedepth[depth])
2734 addsize(size, snapsizedepth[depth])
2723 else:
2735 else:
2724 addsize(size, deltasize)
2736 addsize(size, deltasize)
2725 if delta == rev - 1:
2737 if delta == rev - 1:
2726 numprev += 1
2738 numprev += 1
2727 if delta == p1:
2739 if delta == p1:
2728 nump1prev += 1
2740 nump1prev += 1
2729 elif delta == p2:
2741 elif delta == p2:
2730 nump2prev += 1
2742 nump2prev += 1
2731 elif delta == p1:
2743 elif delta == p1:
2732 nump1 += 1
2744 nump1 += 1
2733 elif delta == p2:
2745 elif delta == p2:
2734 nump2 += 1
2746 nump2 += 1
2735 elif delta != nullrev:
2747 elif delta != nullrev:
2736 numother += 1
2748 numother += 1
2737
2749
2738 # Obtain data on the raw chunks in the revlog.
2750 # Obtain data on the raw chunks in the revlog.
2739 if util.safehasattr(r, b'_getsegmentforrevs'):
2751 if util.safehasattr(r, b'_getsegmentforrevs'):
2740 segment = r._getsegmentforrevs(rev, rev)[1]
2752 segment = r._getsegmentforrevs(rev, rev)[1]
2741 else:
2753 else:
2742 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2754 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2743 if segment:
2755 if segment:
2744 chunktype = bytes(segment[0:1])
2756 chunktype = bytes(segment[0:1])
2745 else:
2757 else:
2746 chunktype = b'empty'
2758 chunktype = b'empty'
2747
2759
2748 if chunktype not in chunktypecounts:
2760 if chunktype not in chunktypecounts:
2749 chunktypecounts[chunktype] = 0
2761 chunktypecounts[chunktype] = 0
2750 chunktypesizes[chunktype] = 0
2762 chunktypesizes[chunktype] = 0
2751
2763
2752 chunktypecounts[chunktype] += 1
2764 chunktypecounts[chunktype] += 1
2753 chunktypesizes[chunktype] += size
2765 chunktypesizes[chunktype] += size
2754
2766
2755 # Adjust size min value for empty cases
2767 # Adjust size min value for empty cases
2756 for size in (datasize, fullsize, semisize, deltasize):
2768 for size in (datasize, fullsize, semisize, deltasize):
2757 if size[0] is None:
2769 if size[0] is None:
2758 size[0] = 0
2770 size[0] = 0
2759
2771
2760 numdeltas = numrevs - numfull - numempty - numsemi
2772 numdeltas = numrevs - numfull - numempty - numsemi
2761 numoprev = numprev - nump1prev - nump2prev
2773 numoprev = numprev - nump1prev - nump2prev
2762 totalrawsize = datasize[2]
2774 totalrawsize = datasize[2]
2763 datasize[2] /= numrevs
2775 datasize[2] /= numrevs
2764 fulltotal = fullsize[2]
2776 fulltotal = fullsize[2]
2765 if numfull == 0:
2777 if numfull == 0:
2766 fullsize[2] = 0
2778 fullsize[2] = 0
2767 else:
2779 else:
2768 fullsize[2] /= numfull
2780 fullsize[2] /= numfull
2769 semitotal = semisize[2]
2781 semitotal = semisize[2]
2770 snaptotal = {}
2782 snaptotal = {}
2771 if numsemi > 0:
2783 if numsemi > 0:
2772 semisize[2] /= numsemi
2784 semisize[2] /= numsemi
2773 for depth in snapsizedepth:
2785 for depth in snapsizedepth:
2774 snaptotal[depth] = snapsizedepth[depth][2]
2786 snaptotal[depth] = snapsizedepth[depth][2]
2775 snapsizedepth[depth][2] /= numsnapdepth[depth]
2787 snapsizedepth[depth][2] /= numsnapdepth[depth]
2776
2788
2777 deltatotal = deltasize[2]
2789 deltatotal = deltasize[2]
2778 if numdeltas > 0:
2790 if numdeltas > 0:
2779 deltasize[2] /= numdeltas
2791 deltasize[2] /= numdeltas
2780 totalsize = fulltotal + semitotal + deltatotal
2792 totalsize = fulltotal + semitotal + deltatotal
2781 avgchainlen = sum(chainlengths) / numrevs
2793 avgchainlen = sum(chainlengths) / numrevs
2782 maxchainlen = max(chainlengths)
2794 maxchainlen = max(chainlengths)
2783 maxchainspan = max(chainspans)
2795 maxchainspan = max(chainspans)
2784 compratio = 1
2796 compratio = 1
2785 if totalsize:
2797 if totalsize:
2786 compratio = totalrawsize / totalsize
2798 compratio = totalrawsize / totalsize
2787
2799
2788 basedfmtstr = b'%%%dd\n'
2800 basedfmtstr = b'%%%dd\n'
2789 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2801 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2790
2802
2791 def dfmtstr(max):
2803 def dfmtstr(max):
2792 return basedfmtstr % len(str(max))
2804 return basedfmtstr % len(str(max))
2793
2805
2794 def pcfmtstr(max, padding=0):
2806 def pcfmtstr(max, padding=0):
2795 return basepcfmtstr % (len(str(max)), b' ' * padding)
2807 return basepcfmtstr % (len(str(max)), b' ' * padding)
2796
2808
2797 def pcfmt(value, total):
2809 def pcfmt(value, total):
2798 if total:
2810 if total:
2799 return (value, 100 * float(value) / total)
2811 return (value, 100 * float(value) / total)
2800 else:
2812 else:
2801 return value, 100.0
2813 return value, 100.0
2802
2814
2803 ui.writenoi18n(b'format : %d\n' % format)
2815 ui.writenoi18n(b'format : %d\n' % format)
2804 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2816 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2805
2817
2806 ui.write(b'\n')
2818 ui.write(b'\n')
2807 fmt = pcfmtstr(totalsize)
2819 fmt = pcfmtstr(totalsize)
2808 fmt2 = dfmtstr(totalsize)
2820 fmt2 = dfmtstr(totalsize)
2809 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2821 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2810 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2822 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2811 ui.writenoi18n(
2823 ui.writenoi18n(
2812 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2824 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2813 )
2825 )
2814 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2815 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2827 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2816 ui.writenoi18n(
2828 ui.writenoi18n(
2817 b' text : '
2829 b' text : '
2818 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2830 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2819 )
2831 )
2820 ui.writenoi18n(
2832 ui.writenoi18n(
2821 b' delta : '
2833 b' delta : '
2822 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2834 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2823 )
2835 )
2824 ui.writenoi18n(
2836 ui.writenoi18n(
2825 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2837 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2826 )
2838 )
2827 for depth in sorted(numsnapdepth):
2839 for depth in sorted(numsnapdepth):
2828 ui.write(
2840 ui.write(
2829 (b' lvl-%-3d : ' % depth)
2841 (b' lvl-%-3d : ' % depth)
2830 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2842 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2831 )
2843 )
2832 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2844 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2833 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2845 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2834 ui.writenoi18n(
2846 ui.writenoi18n(
2835 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2847 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2836 )
2848 )
2837 for depth in sorted(numsnapdepth):
2849 for depth in sorted(numsnapdepth):
2838 ui.write(
2850 ui.write(
2839 (b' lvl-%-3d : ' % depth)
2851 (b' lvl-%-3d : ' % depth)
2840 + fmt % pcfmt(snaptotal[depth], totalsize)
2852 + fmt % pcfmt(snaptotal[depth], totalsize)
2841 )
2853 )
2842 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2854 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2843
2855
2844 def fmtchunktype(chunktype):
2856 def fmtchunktype(chunktype):
2845 if chunktype == b'empty':
2857 if chunktype == b'empty':
2846 return b' %s : ' % chunktype
2858 return b' %s : ' % chunktype
2847 elif chunktype in pycompat.bytestr(string.ascii_letters):
2859 elif chunktype in pycompat.bytestr(string.ascii_letters):
2848 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2860 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2849 else:
2861 else:
2850 return b' 0x%s : ' % hex(chunktype)
2862 return b' 0x%s : ' % hex(chunktype)
2851
2863
2852 ui.write(b'\n')
2864 ui.write(b'\n')
2853 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2865 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2854 for chunktype in sorted(chunktypecounts):
2866 for chunktype in sorted(chunktypecounts):
2855 ui.write(fmtchunktype(chunktype))
2867 ui.write(fmtchunktype(chunktype))
2856 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2868 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2857 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2869 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2858 for chunktype in sorted(chunktypecounts):
2870 for chunktype in sorted(chunktypecounts):
2859 ui.write(fmtchunktype(chunktype))
2871 ui.write(fmtchunktype(chunktype))
2860 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2872 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2861
2873
2862 ui.write(b'\n')
2874 ui.write(b'\n')
2863 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2875 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2864 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2876 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2865 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2877 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2866 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2878 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2867 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2879 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2868
2880
2869 if format > 0:
2881 if format > 0:
2870 ui.write(b'\n')
2882 ui.write(b'\n')
2871 ui.writenoi18n(
2883 ui.writenoi18n(
2872 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2884 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2873 % tuple(datasize)
2885 % tuple(datasize)
2874 )
2886 )
2875 ui.writenoi18n(
2887 ui.writenoi18n(
2876 b'full revision size (min/max/avg) : %d / %d / %d\n'
2888 b'full revision size (min/max/avg) : %d / %d / %d\n'
2877 % tuple(fullsize)
2889 % tuple(fullsize)
2878 )
2890 )
2879 ui.writenoi18n(
2891 ui.writenoi18n(
2880 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2892 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2881 % tuple(semisize)
2893 % tuple(semisize)
2882 )
2894 )
2883 for depth in sorted(snapsizedepth):
2895 for depth in sorted(snapsizedepth):
2884 if depth == 0:
2896 if depth == 0:
2885 continue
2897 continue
2886 ui.writenoi18n(
2898 ui.writenoi18n(
2887 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2899 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2888 % ((depth,) + tuple(snapsizedepth[depth]))
2900 % ((depth,) + tuple(snapsizedepth[depth]))
2889 )
2901 )
2890 ui.writenoi18n(
2902 ui.writenoi18n(
2891 b'delta size (min/max/avg) : %d / %d / %d\n'
2903 b'delta size (min/max/avg) : %d / %d / %d\n'
2892 % tuple(deltasize)
2904 % tuple(deltasize)
2893 )
2905 )
2894
2906
2895 if numdeltas > 0:
2907 if numdeltas > 0:
2896 ui.write(b'\n')
2908 ui.write(b'\n')
2897 fmt = pcfmtstr(numdeltas)
2909 fmt = pcfmtstr(numdeltas)
2898 fmt2 = pcfmtstr(numdeltas, 4)
2910 fmt2 = pcfmtstr(numdeltas, 4)
2899 ui.writenoi18n(
2911 ui.writenoi18n(
2900 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2912 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2901 )
2913 )
2902 if numprev > 0:
2914 if numprev > 0:
2903 ui.writenoi18n(
2915 ui.writenoi18n(
2904 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2916 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2905 )
2917 )
2906 ui.writenoi18n(
2918 ui.writenoi18n(
2907 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2919 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2908 )
2920 )
2909 ui.writenoi18n(
2921 ui.writenoi18n(
2910 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2922 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2911 )
2923 )
2912 if gdelta:
2924 if gdelta:
2913 ui.writenoi18n(
2925 ui.writenoi18n(
2914 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2926 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2915 )
2927 )
2916 ui.writenoi18n(
2928 ui.writenoi18n(
2917 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2929 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2918 )
2930 )
2919 ui.writenoi18n(
2931 ui.writenoi18n(
2920 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2932 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2921 )
2933 )
2922
2934
2923
2935
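# How the width-derived format strings above behave, exercised on a throwaway
# total (this mirrors dfmtstr/pcfmtstr, it adds no new behaviour):
_total = 12345
_fmt = b'%%%dd\n' % len(str(_total))                 # -> b'%5d\n'
_pcfmt = b'%%%dd (%%5.2f%%%%)\n' % len(str(_total))  # -> b'%5d (%5.2f%%)\n'
# _fmt % 42                          -> b'   42\n'
# _pcfmt % (42, 100.0 * 42 / _total) -> b'   42 ( 0.34%)\n'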
2924 @command(
2936 @command(
2925 b'debugrevlogindex',
2937 b'debugrevlogindex',
2926 cmdutil.debugrevlogopts
2938 cmdutil.debugrevlogopts
2927 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2939 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2928 _(b'[-f FORMAT] -c|-m|FILE'),
2940 _(b'[-f FORMAT] -c|-m|FILE'),
2929 optionalrepo=True,
2941 optionalrepo=True,
2930 )
2942 )
2931 def debugrevlogindex(ui, repo, file_=None, **opts):
2943 def debugrevlogindex(ui, repo, file_=None, **opts):
2932 """dump the contents of a revlog index"""
2944 """dump the contents of a revlog index"""
2933 opts = pycompat.byteskwargs(opts)
2945 opts = pycompat.byteskwargs(opts)
2934 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2946 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2935 format = opts.get(b'format', 0)
2947 format = opts.get(b'format', 0)
2936 if format not in (0, 1):
2948 if format not in (0, 1):
2937 raise error.Abort(_(b"unknown format %d") % format)
2949 raise error.Abort(_(b"unknown format %d") % format)
2938
2950
2939 if ui.debugflag:
2951 if ui.debugflag:
2940 shortfn = hex
2952 shortfn = hex
2941 else:
2953 else:
2942 shortfn = short
2954 shortfn = short
2943
2955
2944 # There might not be anything in r, so have a sane default
2956 # There might not be anything in r, so have a sane default
2945 idlen = 12
2957 idlen = 12
2946 for i in r:
2958 for i in r:
2947 idlen = len(shortfn(r.node(i)))
2959 idlen = len(shortfn(r.node(i)))
2948 break
2960 break
2949
2961
2950 if format == 0:
2962 if format == 0:
2951 if ui.verbose:
2963 if ui.verbose:
2952 ui.writenoi18n(
2964 ui.writenoi18n(
2953 b" rev offset length linkrev %s %s p2\n"
2965 b" rev offset length linkrev %s %s p2\n"
2954 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2966 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2955 )
2967 )
2956 else:
2968 else:
2957 ui.writenoi18n(
2969 ui.writenoi18n(
2958 b" rev linkrev %s %s p2\n"
2970 b" rev linkrev %s %s p2\n"
2959 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2971 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2960 )
2972 )
2961 elif format == 1:
2973 elif format == 1:
2962 if ui.verbose:
2974 if ui.verbose:
2963 ui.writenoi18n(
2975 ui.writenoi18n(
2964 (
2976 (
2965 b" rev flag offset length size link p1"
2977 b" rev flag offset length size link p1"
2966 b" p2 %s\n"
2978 b" p2 %s\n"
2967 )
2979 )
2968 % b"nodeid".rjust(idlen)
2980 % b"nodeid".rjust(idlen)
2969 )
2981 )
2970 else:
2982 else:
2971 ui.writenoi18n(
2983 ui.writenoi18n(
2972 b" rev flag size link p1 p2 %s\n"
2984 b" rev flag size link p1 p2 %s\n"
2973 % b"nodeid".rjust(idlen)
2985 % b"nodeid".rjust(idlen)
2974 )
2986 )
2975
2987
2976 for i in r:
2988 for i in r:
2977 node = r.node(i)
2989 node = r.node(i)
2978 if format == 0:
2990 if format == 0:
2979 try:
2991 try:
2980 pp = r.parents(node)
2992 pp = r.parents(node)
2981 except Exception:
2993 except Exception:
2982 pp = [nullid, nullid]
2994 pp = [nullid, nullid]
2983 if ui.verbose:
2995 if ui.verbose:
2984 ui.write(
2996 ui.write(
2985 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2997 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2986 % (
2998 % (
2987 i,
2999 i,
2988 r.start(i),
3000 r.start(i),
2989 r.length(i),
3001 r.length(i),
2990 r.linkrev(i),
3002 r.linkrev(i),
2991 shortfn(node),
3003 shortfn(node),
2992 shortfn(pp[0]),
3004 shortfn(pp[0]),
2993 shortfn(pp[1]),
3005 shortfn(pp[1]),
2994 )
3006 )
2995 )
3007 )
2996 else:
3008 else:
2997 ui.write(
3009 ui.write(
2998 b"% 6d % 7d %s %s %s\n"
3010 b"% 6d % 7d %s %s %s\n"
2999 % (
3011 % (
3000 i,
3012 i,
3001 r.linkrev(i),
3013 r.linkrev(i),
3002 shortfn(node),
3014 shortfn(node),
3003 shortfn(pp[0]),
3015 shortfn(pp[0]),
3004 shortfn(pp[1]),
3016 shortfn(pp[1]),
3005 )
3017 )
3006 )
3018 )
3007 elif format == 1:
3019 elif format == 1:
3008 pr = r.parentrevs(i)
3020 pr = r.parentrevs(i)
3009 if ui.verbose:
3021 if ui.verbose:
3010 ui.write(
3022 ui.write(
3011 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3023 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3012 % (
3024 % (
3013 i,
3025 i,
3014 r.flags(i),
3026 r.flags(i),
3015 r.start(i),
3027 r.start(i),
3016 r.length(i),
3028 r.length(i),
3017 r.rawsize(i),
3029 r.rawsize(i),
3018 r.linkrev(i),
3030 r.linkrev(i),
3019 pr[0],
3031 pr[0],
3020 pr[1],
3032 pr[1],
3021 shortfn(node),
3033 shortfn(node),
3022 )
3034 )
3023 )
3035 )
3024 else:
3036 else:
3025 ui.write(
3037 ui.write(
3026 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3038 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3027 % (
3039 % (
3028 i,
3040 i,
3029 r.flags(i),
3041 r.flags(i),
3030 r.rawsize(i),
3042 r.rawsize(i),
3031 r.linkrev(i),
3043 r.linkrev(i),
3032 pr[0],
3044 pr[0],
3033 pr[1],
3045 pr[1],
3034 shortfn(node),
3046 shortfn(node),
3035 )
3047 )
3036 )
3048 )
3037
3049
3038
3050
3039 @command(
3051 @command(
3040 b'debugrevspec',
3052 b'debugrevspec',
3041 [
3053 [
3042 (
3054 (
3043 b'',
3055 b'',
3044 b'optimize',
3056 b'optimize',
3045 None,
3057 None,
3046 _(b'print parsed tree after optimizing (DEPRECATED)'),
3058 _(b'print parsed tree after optimizing (DEPRECATED)'),
3047 ),
3059 ),
3048 (
3060 (
3049 b'',
3061 b'',
3050 b'show-revs',
3062 b'show-revs',
3051 True,
3063 True,
3052 _(b'print list of result revisions (default)'),
3064 _(b'print list of result revisions (default)'),
3053 ),
3065 ),
3054 (
3066 (
3055 b's',
3067 b's',
3056 b'show-set',
3068 b'show-set',
3057 None,
3069 None,
3058 _(b'print internal representation of result set'),
3070 _(b'print internal representation of result set'),
3059 ),
3071 ),
3060 (
3072 (
3061 b'p',
3073 b'p',
3062 b'show-stage',
3074 b'show-stage',
3063 [],
3075 [],
3064 _(b'print parsed tree at the given stage'),
3076 _(b'print parsed tree at the given stage'),
3065 _(b'NAME'),
3077 _(b'NAME'),
3066 ),
3078 ),
3067 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3079 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3068 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3080 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3069 ],
3081 ],
3070 b'REVSPEC',
3082 b'REVSPEC',
3071 )
3083 )
3072 def debugrevspec(ui, repo, expr, **opts):
3084 def debugrevspec(ui, repo, expr, **opts):
3073 """parse and apply a revision specification
3085 """parse and apply a revision specification
3074
3086
3075 Use -p/--show-stage option to print the parsed tree at the given stages.
3087 Use -p/--show-stage option to print the parsed tree at the given stages.
3076 Use -p all to print the tree at every stage.
3088 Use -p all to print the tree at every stage.
3077
3089
3078 Use --no-show-revs option with -s or -p to print only the set
3090 Use --no-show-revs option with -s or -p to print only the set
3079 representation or the parsed tree respectively.
3091 representation or the parsed tree respectively.
3080
3092
3081 Use --verify-optimized to compare the optimized result with the unoptimized
3093 Use --verify-optimized to compare the optimized result with the unoptimized
3082 one. Returns 1 if the optimized result differs.
3094 one. Returns 1 if the optimized result differs.
3083 """
3095 """
3084 opts = pycompat.byteskwargs(opts)
3096 opts = pycompat.byteskwargs(opts)
3085 aliases = ui.configitems(b'revsetalias')
3097 aliases = ui.configitems(b'revsetalias')
3086 stages = [
3098 stages = [
3087 (b'parsed', lambda tree: tree),
3099 (b'parsed', lambda tree: tree),
3088 (
3100 (
3089 b'expanded',
3101 b'expanded',
3090 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3102 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3091 ),
3103 ),
3092 (b'concatenated', revsetlang.foldconcat),
3104 (b'concatenated', revsetlang.foldconcat),
3093 (b'analyzed', revsetlang.analyze),
3105 (b'analyzed', revsetlang.analyze),
3094 (b'optimized', revsetlang.optimize),
3106 (b'optimized', revsetlang.optimize),
3095 ]
3107 ]
3096 if opts[b'no_optimized']:
3108 if opts[b'no_optimized']:
3097 stages = stages[:-1]
3109 stages = stages[:-1]
3098 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3110 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3099 raise error.Abort(
3111 raise error.Abort(
3100 _(b'cannot use --verify-optimized with --no-optimized')
3112 _(b'cannot use --verify-optimized with --no-optimized')
3101 )
3113 )
3102 stagenames = set(n for n, f in stages)
3114 stagenames = set(n for n, f in stages)
3103
3115
3104 showalways = set()
3116 showalways = set()
3105 showchanged = set()
3117 showchanged = set()
3106 if ui.verbose and not opts[b'show_stage']:
3118 if ui.verbose and not opts[b'show_stage']:
3107 # show parsed tree by --verbose (deprecated)
3119 # show parsed tree by --verbose (deprecated)
3108 showalways.add(b'parsed')
3120 showalways.add(b'parsed')
3109 showchanged.update([b'expanded', b'concatenated'])
3121 showchanged.update([b'expanded', b'concatenated'])
3110 if opts[b'optimize']:
3122 if opts[b'optimize']:
3111 showalways.add(b'optimized')
3123 showalways.add(b'optimized')
3112 if opts[b'show_stage'] and opts[b'optimize']:
3124 if opts[b'show_stage'] and opts[b'optimize']:
3113 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3125 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3114 if opts[b'show_stage'] == [b'all']:
3126 if opts[b'show_stage'] == [b'all']:
3115 showalways.update(stagenames)
3127 showalways.update(stagenames)
3116 else:
3128 else:
3117 for n in opts[b'show_stage']:
3129 for n in opts[b'show_stage']:
3118 if n not in stagenames:
3130 if n not in stagenames:
3119 raise error.Abort(_(b'invalid stage name: %s') % n)
3131 raise error.Abort(_(b'invalid stage name: %s') % n)
3120 showalways.update(opts[b'show_stage'])
3132 showalways.update(opts[b'show_stage'])
3121
3133
3122 treebystage = {}
3134 treebystage = {}
3123 printedtree = None
3135 printedtree = None
3124 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3136 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3125 for n, f in stages:
3137 for n, f in stages:
3126 treebystage[n] = tree = f(tree)
3138 treebystage[n] = tree = f(tree)
3127 if n in showalways or (n in showchanged and tree != printedtree):
3139 if n in showalways or (n in showchanged and tree != printedtree):
3128 if opts[b'show_stage'] or n != b'parsed':
3140 if opts[b'show_stage'] or n != b'parsed':
3129 ui.write(b"* %s:\n" % n)
3141 ui.write(b"* %s:\n" % n)
3130 ui.write(revsetlang.prettyformat(tree), b"\n")
3142 ui.write(revsetlang.prettyformat(tree), b"\n")
3131 printedtree = tree
3143 printedtree = tree
3132
3144
3133 if opts[b'verify_optimized']:
3145 if opts[b'verify_optimized']:
3134 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3146 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3135 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3147 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3136 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3148 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3137 ui.writenoi18n(
3149 ui.writenoi18n(
3138 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3150 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3139 )
3151 )
3140 ui.writenoi18n(
3152 ui.writenoi18n(
3141 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3153 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3142 )
3154 )
3143 arevs = list(arevs)
3155 arevs = list(arevs)
3144 brevs = list(brevs)
3156 brevs = list(brevs)
3145 if arevs == brevs:
3157 if arevs == brevs:
3146 return 0
3158 return 0
3147 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3159 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3148 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3160 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3149 sm = difflib.SequenceMatcher(None, arevs, brevs)
3161 sm = difflib.SequenceMatcher(None, arevs, brevs)
3150 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3162 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3151 if tag in ('delete', 'replace'):
3163 if tag in ('delete', 'replace'):
3152 for c in arevs[alo:ahi]:
3164 for c in arevs[alo:ahi]:
3153 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3165 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3154 if tag in ('insert', 'replace'):
3166 if tag in ('insert', 'replace'):
3155 for c in brevs[blo:bhi]:
3167 for c in brevs[blo:bhi]:
3156 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3168 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3157 if tag == 'equal':
3169 if tag == 'equal':
3158 for c in arevs[alo:ahi]:
3170 for c in arevs[alo:ahi]:
3159 ui.write(b' %d\n' % c)
3171 ui.write(b' %d\n' % c)
3160 return 1
3172 return 1
3161
3173
3162 func = revset.makematcher(tree)
3174 func = revset.makematcher(tree)
3163 revs = func(repo)
3175 revs = func(repo)
3164 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3176 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3165 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3177 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3166 if not opts[b'show_revs']:
3178 if not opts[b'show_revs']:
3167 return
3179 return
3168 for c in revs:
3180 for c in revs:
3169 ui.write(b"%d\n" % c)
3181 ui.write(b"%d\n" % c)
3170
3182
3171
3183
3172 @command(
3184 @command(
3173 b'debugserve',
3185 b'debugserve',
3174 [
3186 [
3175 (
3187 (
3176 b'',
3188 b'',
3177 b'sshstdio',
3189 b'sshstdio',
3178 False,
3190 False,
3179 _(b'run an SSH server bound to process handles'),
3191 _(b'run an SSH server bound to process handles'),
3180 ),
3192 ),
3181 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3193 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3182 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3194 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3183 ],
3195 ],
3184 b'',
3196 b'',
3185 )
3197 )
3186 def debugserve(ui, repo, **opts):
3198 def debugserve(ui, repo, **opts):
3187 """run a server with advanced settings
3199 """run a server with advanced settings
3188
3200
3189 This command is similar to :hg:`serve`. It exists partially as a
3201 This command is similar to :hg:`serve`. It exists partially as a
3190 workaround for the fact that ``hg serve --stdio`` must have specific
3202 workaround for the fact that ``hg serve --stdio`` must have specific
3191 arguments for security reasons.
3203 arguments for security reasons.
3192 """
3204 """
3193 opts = pycompat.byteskwargs(opts)
3205 opts = pycompat.byteskwargs(opts)
3194
3206
3195 if not opts[b'sshstdio']:
3207 if not opts[b'sshstdio']:
3196 raise error.Abort(_(b'only --sshstdio is currently supported'))
3208 raise error.Abort(_(b'only --sshstdio is currently supported'))
3197
3209
3198 logfh = None
3210 logfh = None
3199
3211
3200 if opts[b'logiofd'] and opts[b'logiofile']:
3212 if opts[b'logiofd'] and opts[b'logiofile']:
3201 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3213 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3202
3214
3203 if opts[b'logiofd']:
3215 if opts[b'logiofd']:
3204 # Line buffered because output is line based.
3216 # Line buffered because output is line based.
3205 try:
3217 try:
3206 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3218 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 1)
3207 except OSError as e:
3219 except OSError as e:
3208 if e.errno != errno.ESPIPE:
3220 if e.errno != errno.ESPIPE:
3209 raise
3221 raise
3210 # can't seek a pipe, so `ab` mode fails on py3
3222 # can't seek a pipe, so `ab` mode fails on py3
3211 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3223 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 1)
3212 elif opts[b'logiofile']:
3224 elif opts[b'logiofile']:
3213 logfh = open(opts[b'logiofile'], b'ab', 1)
3225 logfh = open(opts[b'logiofile'], b'ab', 1)
3214
3226
3215 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3227 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3216 s.serve_forever()
3228 s.serve_forever()
3217
3229
3218
3230
3219 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3231 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3220 def debugsetparents(ui, repo, rev1, rev2=None):
3232 def debugsetparents(ui, repo, rev1, rev2=None):
3221 """manually set the parents of the current working directory
3233 """manually set the parents of the current working directory
3222
3234
3223 This is useful for writing repository conversion tools, but should
3235 This is useful for writing repository conversion tools, but should
3224 be used with care. For example, neither the working directory nor the
3236 be used with care. For example, neither the working directory nor the
3225 dirstate is updated, so file status may be incorrect after running this
3237 dirstate is updated, so file status may be incorrect after running this
3226 command.
3238 command.
3227
3239
3228 Returns 0 on success.
3240 Returns 0 on success.
3229 """
3241 """
3230
3242
3231 node1 = scmutil.revsingle(repo, rev1).node()
3243 node1 = scmutil.revsingle(repo, rev1).node()
3232 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3244 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3233
3245
3234 with repo.wlock():
3246 with repo.wlock():
3235 repo.setparents(node1, node2)
3247 repo.setparents(node1, node2)
3236
3248
3237
3249
3238 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3250 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3239 def debugsidedata(ui, repo, file_, rev=None, **opts):
3251 def debugsidedata(ui, repo, file_, rev=None, **opts):
3240 """dump the side data for a cl/manifest/file revision
3252 """dump the side data for a cl/manifest/file revision
3241
3253
3242 Use --verbose to dump the sidedata content."""
3254 Use --verbose to dump the sidedata content."""
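# Editor's note, not part of the original source (revision and path are
# illustrative):
#
#   hg debugsidedata -c 0               # sidedata of changelog revision 0
#   hg debugsidedata path/to/file 4     # sidedata of a filelog revision
#
# --verbose additionally pretty-prints each entry's raw value, as handled
# at the end of this function.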
3243 opts = pycompat.byteskwargs(opts)
3255 opts = pycompat.byteskwargs(opts)
3244 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3256 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3245 if rev is not None:
3257 if rev is not None:
3246 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3258 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3247 file_, rev = None, file_
3259 file_, rev = None, file_
3248 elif rev is None:
3260 elif rev is None:
3249 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3261 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3250 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3262 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3251 r = getattr(r, '_revlog', r)
3263 r = getattr(r, '_revlog', r)
3252 try:
3264 try:
3253 sidedata = r.sidedata(r.lookup(rev))
3265 sidedata = r.sidedata(r.lookup(rev))
3254 except KeyError:
3266 except KeyError:
3255 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3267 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3256 if sidedata:
3268 if sidedata:
3257 sidedata = list(sidedata.items())
3269 sidedata = list(sidedata.items())
3258 sidedata.sort()
3270 sidedata.sort()
3259 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3271 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3260 for key, value in sidedata:
3272 for key, value in sidedata:
3261 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3273 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3262 if ui.verbose:
3274 if ui.verbose:
3263 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3275 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3264
3276
3265
3277
3266 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3278 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3267 def debugssl(ui, repo, source=None, **opts):
3279 def debugssl(ui, repo, source=None, **opts):
3268 '''test a secure connection to a server
3280 '''test a secure connection to a server
3269
3281
3270 This builds the certificate chain for the server on Windows, installing the
3282 This builds the certificate chain for the server on Windows, installing the
3271 missing intermediates and trusted root via Windows Update if necessary. It
3283 missing intermediates and trusted root via Windows Update if necessary. It
3272 does nothing on other platforms.
3284 does nothing on other platforms.
3273
3285
3274 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3286 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3275 that server is used. See :hg:`help urls` for more information.
3287 that server is used. See :hg:`help urls` for more information.
3276
3288
3277 If the update succeeds, retry the original operation. Otherwise, the cause
3289 If the update succeeds, retry the original operation. Otherwise, the cause
3278 of the SSL error is likely another issue.
3290 of the SSL error is likely another issue.
3279 '''
3291 '''
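# Editor's note, not part of the original source (the host is illustrative):
#
#   hg debugssl                        # check the repo's 'default' path
#   hg debugssl https://example.com/   # check an explicit server
#
# Both abort on non-Windows platforms, as enforced immediately below.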
3280 if not pycompat.iswindows:
3292 if not pycompat.iswindows:
3281 raise error.Abort(
3293 raise error.Abort(
3282 _(b'certificate chain building is only possible on Windows')
3294 _(b'certificate chain building is only possible on Windows')
3283 )
3295 )
3284
3296
3285 if not source:
3297 if not source:
3286 if not repo:
3298 if not repo:
3287 raise error.Abort(
3299 raise error.Abort(
3288 _(
3300 _(
3289 b"there is no Mercurial repository here, and no "
3301 b"there is no Mercurial repository here, and no "
3290 b"server specified"
3302 b"server specified"
3291 )
3303 )
3292 )
3304 )
3293 source = b"default"
3305 source = b"default"
3294
3306
3295 source, branches = hg.parseurl(ui.expandpath(source))
3307 source, branches = hg.parseurl(ui.expandpath(source))
3296 url = util.url(source)
3308 url = util.url(source)
3297
3309
3298 defaultport = {b'https': 443, b'ssh': 22}
3310 defaultport = {b'https': 443, b'ssh': 22}
3299 if url.scheme in defaultport:
3311 if url.scheme in defaultport:
3300 try:
3312 try:
3301 addr = (url.host, int(url.port or defaultport[url.scheme]))
3313 addr = (url.host, int(url.port or defaultport[url.scheme]))
3302 except ValueError:
3314 except ValueError:
3303 raise error.Abort(_(b"malformed port number in URL"))
3315 raise error.Abort(_(b"malformed port number in URL"))
3304 else:
3316 else:
3305 raise error.Abort(_(b"only https and ssh connections are supported"))
3317 raise error.Abort(_(b"only https and ssh connections are supported"))
3306
3318
3307 from . import win32
3319 from . import win32
3308
3320
3309 s = ssl.wrap_socket(
3321 s = ssl.wrap_socket(
3310 socket.socket(),
3322 socket.socket(),
3311 ssl_version=ssl.PROTOCOL_TLS,
3323 ssl_version=ssl.PROTOCOL_TLS,
3312 cert_reqs=ssl.CERT_NONE,
3324 cert_reqs=ssl.CERT_NONE,
3313 ca_certs=None,
3325 ca_certs=None,
3314 )
3326 )
3315
3327
3316 try:
3328 try:
3317 s.connect(addr)
3329 s.connect(addr)
3318 cert = s.getpeercert(True)
3330 cert = s.getpeercert(True)
3319
3331
3320 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3332 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3321
3333
3322 complete = win32.checkcertificatechain(cert, build=False)
3334 complete = win32.checkcertificatechain(cert, build=False)
3323
3335
3324 if not complete:
3336 if not complete:
3325 ui.status(_(b'certificate chain is incomplete, updating... '))
3337 ui.status(_(b'certificate chain is incomplete, updating... '))
3326
3338
3327 if not win32.checkcertificatechain(cert):
3339 if not win32.checkcertificatechain(cert):
3328 ui.status(_(b'failed.\n'))
3340 ui.status(_(b'failed.\n'))
3329 else:
3341 else:
3330 ui.status(_(b'done.\n'))
3342 ui.status(_(b'done.\n'))
3331 else:
3343 else:
3332 ui.status(_(b'full certificate chain is available\n'))
3344 ui.status(_(b'full certificate chain is available\n'))
3333 finally:
3345 finally:
3334 s.close()
3346 s.close()
3335
3347
3336
3348
3337 @command(
3349 @command(
3338 b'debugsub',
3350 b'debugsub',
3339 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3351 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3340 _(b'[-r REV] [REV]'),
3352 _(b'[-r REV] [REV]'),
3341 )
3353 )
3342 def debugsub(ui, repo, rev=None):
3354 def debugsub(ui, repo, rev=None):
3343 ctx = scmutil.revsingle(repo, rev, None)
3355 ctx = scmutil.revsingle(repo, rev, None)
3344 for k, v in sorted(ctx.substate.items()):
3356 for k, v in sorted(ctx.substate.items()):
3345 ui.writenoi18n(b'path %s\n' % k)
3357 ui.writenoi18n(b'path %s\n' % k)
3346 ui.writenoi18n(b' source %s\n' % v[0])
3358 ui.writenoi18n(b' source %s\n' % v[0])
3347 ui.writenoi18n(b' revision %s\n' % v[1])
3359 ui.writenoi18n(b' revision %s\n' % v[1])
3348
3360
3349
3361
3350 @command(
3362 @command(
3351 b'debugsuccessorssets',
3363 b'debugsuccessorssets',
3352 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3364 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3353 _(b'[REV]'),
3365 _(b'[REV]'),
3354 )
3366 )
3355 def debugsuccessorssets(ui, repo, *revs, **opts):
3367 def debugsuccessorssets(ui, repo, *revs, **opts):
3356 """show set of successors for revision
3368 """show set of successors for revision
3357
3369
3358 A successors set of changeset A is a consistent group of revisions that
3370 A successors set of changeset A is a consistent group of revisions that
3359 succeed A. It contains non-obsolete changesets only unless closests
3359 succeed A. It contains only non-obsolete changesets unless the closest
3371 succeed A. It contains only non-obsolete changesets unless the closest
3360 successors sets are requested (--closest).
3372 successors sets are requested (--closest).
3373
3362 In most cases a changeset A has a single successors set containing a single
3374 In most cases a changeset A has a single successors set containing a single
3363 successor (changeset A replaced by A').
3375 successor (changeset A replaced by A').
3364
3376
3365 A changeset that is made obsolete with no successors is called "pruned".
3377 A changeset that is made obsolete with no successors is called "pruned".
3366 Such changesets have no successors sets at all.
3378 Such changesets have no successors sets at all.
3367
3379
3368 A changeset that has been "split" will have a successors set containing
3380 A changeset that has been "split" will have a successors set containing
3369 more than one successor.
3381 more than one successor.
3370
3382
3371 A changeset that has been rewritten in multiple different ways is called
3383 A changeset that has been rewritten in multiple different ways is called
3372 "divergent". Such changesets have multiple successor sets (each of which
3384 "divergent". Such changesets have multiple successor sets (each of which
3373 may also be split, i.e. have multiple successors).
3385 may also be split, i.e. have multiple successors).
3374
3386
3375 Results are displayed as follows::
3387 Results are displayed as follows::
3376
3388
3377 <rev1>
3389 <rev1>
3378 <successors-1A>
3390 <successors-1A>
3379 <rev2>
3391 <rev2>
3380 <successors-2A>
3392 <successors-2A>
3381 <successors-2B1> <successors-2B2> <successors-2B3>
3393 <successors-2B1> <successors-2B2> <successors-2B3>
3382
3394
3383 Here rev2 has two possible (i.e. divergent) successors sets. The first
3395 Here rev2 has two possible (i.e. divergent) successors sets. The first
3384 holds one element, whereas the second holds three (i.e. the changeset has
3396 holds one element, whereas the second holds three (i.e. the changeset has
3385 been split).
3397 been split).
3386 """
3398 """
3387 # passed to successorssets caching computation from one call to another
3399 # passed to successorssets caching computation from one call to another
3388 cache = {}
3400 cache = {}
3389 ctx2str = bytes
3401 ctx2str = bytes
3390 node2str = short
3402 node2str = short
3391 for rev in scmutil.revrange(repo, revs):
3403 for rev in scmutil.revrange(repo, revs):
3392 ctx = repo[rev]
3404 ctx = repo[rev]
3393 ui.write(b'%s\n' % ctx2str(ctx))
3405 ui.write(b'%s\n' % ctx2str(ctx))
3394 for succsset in obsutil.successorssets(
3406 for succsset in obsutil.successorssets(
3395 repo, ctx.node(), closest=opts['closest'], cache=cache
3407 repo, ctx.node(), closest=opts['closest'], cache=cache
3396 ):
3408 ):
3397 if succsset:
3409 if succsset:
3398 ui.write(b' ')
3410 ui.write(b' ')
3399 ui.write(node2str(succsset[0]))
3411 ui.write(node2str(succsset[0]))
3400 for node in succsset[1:]:
3412 for node in succsset[1:]:
3401 ui.write(b' ')
3413 ui.write(b' ')
3402 ui.write(node2str(node))
3414 ui.write(node2str(node))
3403 ui.write(b'\n')
3415 ui.write(b'\n')
3404
3416
3405
3417
3406 @command(
3418 @command(
3407 b'debugtemplate',
3419 b'debugtemplate',
3408 [
3420 [
3409 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3421 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3410 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3422 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3411 ],
3423 ],
3412 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3424 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3413 optionalrepo=True,
3425 optionalrepo=True,
3414 )
3426 )
3415 def debugtemplate(ui, repo, tmpl, **opts):
3427 def debugtemplate(ui, repo, tmpl, **opts):
3416 """parse and apply a template
3428 """parse and apply a template
3417
3429
3418 If -r/--rev is given, the template is processed as a log template and
3430 If -r/--rev is given, the template is processed as a log template and
3419 applied to the given changesets. Otherwise, it is processed as a generic
3431 applied to the given changesets. Otherwise, it is processed as a generic
3420 template.
3432 template.
3421
3433
3422 Use --verbose to print the parsed tree.
3434 Use --verbose to print the parsed tree.
3423 """
3435 """
3424 revs = None
3436 revs = None
3425 if opts['rev']:
3437 if opts['rev']:
3426 if repo is None:
3438 if repo is None:
3427 raise error.RepoError(
3439 raise error.RepoError(
3428 _(b'there is no Mercurial repository here (.hg not found)')
3440 _(b'there is no Mercurial repository here (.hg not found)')
3429 )
3441 )
3430 revs = scmutil.revrange(repo, opts['rev'])
3442 revs = scmutil.revrange(repo, opts['rev'])
3431
3443
3432 props = {}
3444 props = {}
3433 for d in opts['define']:
3445 for d in opts['define']:
3434 try:
3446 try:
3435 k, v = (e.strip() for e in d.split(b'=', 1))
3447 k, v = (e.strip() for e in d.split(b'=', 1))
3436 if not k or k == b'ui':
3448 if not k or k == b'ui':
3437 raise ValueError
3449 raise ValueError
3438 props[k] = v
3450 props[k] = v
3439 except ValueError:
3451 except ValueError:
3440 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3452 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3441
3453
3442 if ui.verbose:
3454 if ui.verbose:
3443 aliases = ui.configitems(b'templatealias')
3455 aliases = ui.configitems(b'templatealias')
3444 tree = templater.parse(tmpl)
3456 tree = templater.parse(tmpl)
3445 ui.note(templater.prettyformat(tree), b'\n')
3457 ui.note(templater.prettyformat(tree), b'\n')
3446 newtree = templater.expandaliases(tree, aliases)
3458 newtree = templater.expandaliases(tree, aliases)
3447 if newtree != tree:
3459 if newtree != tree:
3448 ui.notenoi18n(
3460 ui.notenoi18n(
3449 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3461 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3450 )
3462 )
3451
3463
3452 if revs is None:
3464 if revs is None:
3453 tres = formatter.templateresources(ui, repo)
3465 tres = formatter.templateresources(ui, repo)
3454 t = formatter.maketemplater(ui, tmpl, resources=tres)
3466 t = formatter.maketemplater(ui, tmpl, resources=tres)
3455 if ui.verbose:
3467 if ui.verbose:
3456 kwds, funcs = t.symbolsuseddefault()
3468 kwds, funcs = t.symbolsuseddefault()
3457 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3469 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3458 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3470 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3459 ui.write(t.renderdefault(props))
3471 ui.write(t.renderdefault(props))
3460 else:
3472 else:
3461 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3473 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3462 if ui.verbose:
3474 if ui.verbose:
3463 kwds, funcs = displayer.t.symbolsuseddefault()
3475 kwds, funcs = displayer.t.symbolsuseddefault()
3464 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3476 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3465 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3477 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3466 for r in revs:
3478 for r in revs:
3467 displayer.show(repo[r], **pycompat.strkwargs(props))
3479 displayer.show(repo[r], **pycompat.strkwargs(props))
3468 displayer.close()
3480 displayer.close()
3469
3481
3470
3482
3471 @command(
3483 @command(
3472 b'debuguigetpass',
3484 b'debuguigetpass',
3473 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3485 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3474 _(b'[-p TEXT]'),
3486 _(b'[-p TEXT]'),
3475 norepo=True,
3487 norepo=True,
3476 )
3488 )
3477 def debuguigetpass(ui, prompt=b''):
3489 def debuguigetpass(ui, prompt=b''):
3478 """show prompt to type password"""
3490 """show prompt to type password"""
3479 r = ui.getpass(prompt)
3491 r = ui.getpass(prompt)
3480 ui.writenoi18n(b'response: %s\n' % r)
3492 ui.writenoi18n(b'response: %s\n' % r)
3481
3493
3482
3494
3483 @command(
3495 @command(
3484 b'debuguiprompt',
3496 b'debuguiprompt',
3485 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3497 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3486 _(b'[-p TEXT]'),
3498 _(b'[-p TEXT]'),
3487 norepo=True,
3499 norepo=True,
3488 )
3500 )
3489 def debuguiprompt(ui, prompt=b''):
3501 def debuguiprompt(ui, prompt=b''):
3490 """show plain prompt"""
3502 """show plain prompt"""
3491 r = ui.prompt(prompt)
3503 r = ui.prompt(prompt)
3492 ui.writenoi18n(b'response: %s\n' % r)
3504 ui.writenoi18n(b'response: %s\n' % r)
3493
3505
3494
3506
3495 @command(b'debugupdatecaches', [])
3507 @command(b'debugupdatecaches', [])
3496 def debugupdatecaches(ui, repo, *pats, **opts):
3508 def debugupdatecaches(ui, repo, *pats, **opts):
3497 """warm all known caches in the repository"""
3509 """warm all known caches in the repository"""
3498 with repo.wlock(), repo.lock():
3510 with repo.wlock(), repo.lock():
3499 repo.updatecaches(full=True)
3511 repo.updatecaches(full=True)
3500
3512
3501
3513
3502 @command(
3514 @command(
3503 b'debugupgraderepo',
3515 b'debugupgraderepo',
3504 [
3516 [
3505 (
3517 (
3506 b'o',
3518 b'o',
3507 b'optimize',
3519 b'optimize',
3508 [],
3520 [],
3509 _(b'extra optimization to perform'),
3521 _(b'extra optimization to perform'),
3510 _(b'NAME'),
3522 _(b'NAME'),
3511 ),
3523 ),
3512 (b'', b'run', False, _(b'performs an upgrade')),
3524 (b'', b'run', False, _(b'performs an upgrade')),
3513 (b'', b'backup', True, _(b'keep the old repository content around')),
3525 (b'', b'backup', True, _(b'keep the old repository content around')),
3514 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3526 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3515 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3527 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3516 ],
3528 ],
3517 )
3529 )
3518 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3530 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3519 """upgrade a repository to use different features
3531 """upgrade a repository to use different features
3520
3532
3521 If no arguments are specified, the repository is evaluated for upgrade
3533 If no arguments are specified, the repository is evaluated for upgrade
3522 and a list of problems and potential optimizations is printed.
3534 and a list of problems and potential optimizations is printed.
3523
3535
3524 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3536 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3525 can be influenced via additional arguments. More details will be provided
3537 can be influenced via additional arguments. More details will be provided
3526 by the command output when run without ``--run``.
3538 by the command output when run without ``--run``.
3527
3539
3528 During the upgrade, the repository will be locked and no writes will be
3540 During the upgrade, the repository will be locked and no writes will be
3529 allowed.
3541 allowed.
3530
3542
3531 At the end of the upgrade, the repository may not be readable while new
3543 At the end of the upgrade, the repository may not be readable while new
3532 repository data is swapped in. This window will be as long as it takes to
3544 repository data is swapped in. This window will be as long as it takes to
3533 rename some directories inside the ``.hg`` directory. On most machines, this
3545 rename some directories inside the ``.hg`` directory. On most machines, this
3534 should complete almost instantaneously and the chances of a consumer being
3546 should complete almost instantaneously and the chances of a consumer being
3535 unable to access the repository should be low.
3547 unable to access the repository should be low.
3536
3548
3537 By default, all revlogs will be upgraded. You can restrict this using flags
3549 By default, all revlogs will be upgraded. You can restrict this using flags
3538 such as `--manifest`:
3550 such as `--manifest`:
3539
3551
3540 * `--manifest`: only optimize the manifest
3552 * `--manifest`: only optimize the manifest
3541 * `--no-manifest`: optimize all revlogs but the manifest
3553 * `--no-manifest`: optimize all revlogs but the manifest
3542 * `--changelog`: optimize the changelog only
3554 * `--changelog`: optimize the changelog only
3543 * `--no-changelog --no-manifest`: optimize filelogs only
3555 * `--no-changelog --no-manifest`: optimize filelogs only
3544 """
3556 """
3545 return upgrade.upgraderepo(
3557 return upgrade.upgraderepo(
3546 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3558 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3547 )
3559 )
3548
3560
3549
3561
3550 @command(
3562 @command(
3551 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3563 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3552 )
3564 )
3553 def debugwalk(ui, repo, *pats, **opts):
3565 def debugwalk(ui, repo, *pats, **opts):
3554 """show how files match on given patterns"""
3566 """show how files match on given patterns"""
3555 opts = pycompat.byteskwargs(opts)
3567 opts = pycompat.byteskwargs(opts)
3556 m = scmutil.match(repo[None], pats, opts)
3568 m = scmutil.match(repo[None], pats, opts)
3557 if ui.verbose:
3569 if ui.verbose:
3558 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3570 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3559 items = list(repo[None].walk(m))
3571 items = list(repo[None].walk(m))
3560 if not items:
3572 if not items:
3561 return
3573 return
3562 f = lambda fn: fn
3574 f = lambda fn: fn
3563 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3575 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3564 f = lambda fn: util.normpath(fn)
3576 f = lambda fn: util.normpath(fn)
3565 fmt = b'f %%-%ds %%-%ds %%s' % (
3577 fmt = b'f %%-%ds %%-%ds %%s' % (
3566 max([len(abs) for abs in items]),
3578 max([len(abs) for abs in items]),
3567 max([len(repo.pathto(abs)) for abs in items]),
3579 max([len(repo.pathto(abs)) for abs in items]),
3568 )
3580 )
3569 for abs in items:
3581 for abs in items:
3570 line = fmt % (
3582 line = fmt % (
3571 abs,
3583 abs,
3572 f(repo.pathto(abs)),
3584 f(repo.pathto(abs)),
3573 m.exact(abs) and b'exact' or b'',
3585 m.exact(abs) and b'exact' or b'',
3574 )
3586 )
3575 ui.write(b"%s\n" % line.rstrip())
3587 ui.write(b"%s\n" % line.rstrip())
3576
3588
3577
3589
3578 @command(b'debugwhyunstable', [], _(b'REV'))
3590 @command(b'debugwhyunstable', [], _(b'REV'))
3579 def debugwhyunstable(ui, repo, rev):
3591 def debugwhyunstable(ui, repo, rev):
3580 """explain instabilities of a changeset"""
3592 """explain instabilities of a changeset"""
3581 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3593 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3582 dnodes = b''
3594 dnodes = b''
3583 if entry.get(b'divergentnodes'):
3595 if entry.get(b'divergentnodes'):
3584 dnodes = (
3596 dnodes = (
3585 b' '.join(
3597 b' '.join(
3586 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3598 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3587 for ctx in entry[b'divergentnodes']
3599 for ctx in entry[b'divergentnodes']
3588 )
3600 )
3589 + b' '
3601 + b' '
3590 )
3602 )
3591 ui.write(
3603 ui.write(
3592 b'%s: %s%s %s\n'
3604 b'%s: %s%s %s\n'
3593 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3605 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3594 )
3606 )
3595
3607
3596
3608
3597 @command(
3609 @command(
3598 b'debugwireargs',
3610 b'debugwireargs',
3599 [
3611 [
3600 (b'', b'three', b'', b'three'),
3612 (b'', b'three', b'', b'three'),
3601 (b'', b'four', b'', b'four'),
3613 (b'', b'four', b'', b'four'),
3602 (b'', b'five', b'', b'five'),
3614 (b'', b'five', b'', b'five'),
3603 ]
3615 ]
3604 + cmdutil.remoteopts,
3616 + cmdutil.remoteopts,
3605 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3617 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3606 norepo=True,
3618 norepo=True,
3607 )
3619 )
3608 def debugwireargs(ui, repopath, *vals, **opts):
3620 def debugwireargs(ui, repopath, *vals, **opts):
3609 opts = pycompat.byteskwargs(opts)
3621 opts = pycompat.byteskwargs(opts)
3610 repo = hg.peer(ui, opts, repopath)
3622 repo = hg.peer(ui, opts, repopath)
3611 for opt in cmdutil.remoteopts:
3623 for opt in cmdutil.remoteopts:
3612 del opts[opt[1]]
3624 del opts[opt[1]]
3613 args = {}
3625 args = {}
3614 for k, v in pycompat.iteritems(opts):
3626 for k, v in pycompat.iteritems(opts):
3615 if v:
3627 if v:
3616 args[k] = v
3628 args[k] = v
3617 args = pycompat.strkwargs(args)
3629 args = pycompat.strkwargs(args)
3618 # run twice to check that we don't mess up the stream for the next command
3630 # run twice to check that we don't mess up the stream for the next command
3619 res1 = repo.debugwireargs(*vals, **args)
3631 res1 = repo.debugwireargs(*vals, **args)
3620 res2 = repo.debugwireargs(*vals, **args)
3632 res2 = repo.debugwireargs(*vals, **args)
3621 ui.write(b"%s\n" % res1)
3633 ui.write(b"%s\n" % res1)
3622 if res1 != res2:
3634 if res1 != res2:
3623 ui.warn(b"%s\n" % res2)
3635 ui.warn(b"%s\n" % res2)
3624
3636
3625
3637
3626 def _parsewirelangblocks(fh):
3638 def _parsewirelangblocks(fh):
3627 activeaction = None
3639 activeaction = None
3628 blocklines = []
3640 blocklines = []
3629 lastindent = 0
3641 lastindent = 0
3630
3642
3631 for line in fh:
3643 for line in fh:
3632 line = line.rstrip()
3644 line = line.rstrip()
3633 if not line:
3645 if not line:
3634 continue
3646 continue
3635
3647
3636 if line.startswith(b'#'):
3648 if line.startswith(b'#'):
3637 continue
3649 continue
3638
3650
3639 if not line.startswith(b' '):
3651 if not line.startswith(b' '):
3640 # New block. Flush previous one.
3652 # New block. Flush previous one.
3641 if activeaction:
3653 if activeaction:
3642 yield activeaction, blocklines
3654 yield activeaction, blocklines
3643
3655
3644 activeaction = line
3656 activeaction = line
3645 blocklines = []
3657 blocklines = []
3646 lastindent = 0
3658 lastindent = 0
3647 continue
3659 continue
3648
3660
3649 # Else we start with an indent.
3661 # Else we start with an indent.
3650
3662
3651 if not activeaction:
3663 if not activeaction:
3652 raise error.Abort(_(b'indented line outside of block'))
3664 raise error.Abort(_(b'indented line outside of block'))
3653
3665
3654 indent = len(line) - len(line.lstrip())
3666 indent = len(line) - len(line.lstrip())
3655
3667
3656 # If this line is indented more than the last line, concatenate it.
3668 # If this line is indented more than the last line, concatenate it.
3657 if indent > lastindent and blocklines:
3669 if indent > lastindent and blocklines:
3658 blocklines[-1] += line.lstrip()
3670 blocklines[-1] += line.lstrip()
3659 else:
3671 else:
3660 blocklines.append(line)
3672 blocklines.append(line)
3661 lastindent = indent
3673 lastindent = indent
3662
3674
3663 # Flush last block.
3675 # Flush last block.
3664 if activeaction:
3676 if activeaction:
3665 yield activeaction, blocklines
3677 yield activeaction, blocklines
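# Editor's sketch, not part of the original source: given an input such as
#
#   command listkeys
#       namespace bookmarks
#
# the generator above yields one (action, blocklines) pair: the action is
# b'command listkeys' and blocklines holds the indented line with its
# leading whitespace preserved; lines indented deeper than their
# predecessor are folded into it rather than appended.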
3666
3678
3667
3679
3668 @command(
3680 @command(
3669 b'debugwireproto',
3681 b'debugwireproto',
3670 [
3682 [
3671 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3683 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3672 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3684 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3673 (
3685 (
3674 b'',
3686 b'',
3675 b'noreadstderr',
3687 b'noreadstderr',
3676 False,
3688 False,
3677 _(b'do not read from stderr of the remote'),
3689 _(b'do not read from stderr of the remote'),
3678 ),
3690 ),
3679 (
3691 (
3680 b'',
3692 b'',
3681 b'nologhandshake',
3693 b'nologhandshake',
3682 False,
3694 False,
3683 _(b'do not log I/O related to the peer handshake'),
3695 _(b'do not log I/O related to the peer handshake'),
3684 ),
3696 ),
3685 ]
3697 ]
3686 + cmdutil.remoteopts,
3698 + cmdutil.remoteopts,
3687 _(b'[PATH]'),
3699 _(b'[PATH]'),
3688 optionalrepo=True,
3700 optionalrepo=True,
3689 )
3701 )
3690 def debugwireproto(ui, repo, path=None, **opts):
3702 def debugwireproto(ui, repo, path=None, **opts):
3691 """send wire protocol commands to a server
3703 """send wire protocol commands to a server
3692
3704
3693 This command can be used to issue wire protocol commands to remote
3705 This command can be used to issue wire protocol commands to remote
3694 peers and to debug the raw data being exchanged.
3706 peers and to debug the raw data being exchanged.
3695
3707
3696 ``--localssh`` will start an SSH server against the current repository
3708 ``--localssh`` will start an SSH server against the current repository
3697 and connect to that. By default, the connection will perform a handshake
3709 and connect to that. By default, the connection will perform a handshake
3698 and establish an appropriate peer instance.
3710 and establish an appropriate peer instance.
3699
3711
3700 ``--peer`` can be used to bypass the handshake protocol and construct a
3712 ``--peer`` can be used to bypass the handshake protocol and construct a
3701 peer instance using the specified class type. Valid values are ``raw``,
3713 peer instance using the specified class type. Valid values are ``raw``,
3702 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3714 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3703 raw data payloads and don't support higher-level command actions.
3715 raw data payloads and don't support higher-level command actions.
3704
3716
3705 ``--noreadstderr`` can be used to disable automatic reading from stderr
3717 ``--noreadstderr`` can be used to disable automatic reading from stderr
3706 of the peer (for SSH connections only). Disabling automatic reading of
3718 of the peer (for SSH connections only). Disabling automatic reading of
3707 stderr is useful for making output more deterministic.
3719 stderr is useful for making output more deterministic.
3708
3720
3709 Commands are issued via a mini language which is specified via stdin.
3721 Commands are issued via a mini language which is specified via stdin.
3710 The language consists of individual actions to perform. An action is
3722 The language consists of individual actions to perform. An action is
3711 defined by a block. A block is defined as a line with no leading
3723 defined by a block. A block is defined as a line with no leading
3712 space followed by 0 or more lines with leading space. Blocks are
3724 space followed by 0 or more lines with leading space. Blocks are
3713 effectively a high-level command with additional metadata.
3725 effectively a high-level command with additional metadata.
3714
3726
3715 Lines beginning with ``#`` are ignored.
3727 Lines beginning with ``#`` are ignored.
3716
3728
3717 The following sections denote available actions.
3729 The following sections denote available actions.
3718
3730
3719 raw
3731 raw
3720 ---
3732 ---
3721
3733
3722 Send raw data to the server.
3734 Send raw data to the server.
3723
3735
3724 The block payload contains the raw data to send as one atomic send
3736 The block payload contains the raw data to send as one atomic send
3725 operation. The data may not actually be delivered in a single system
3737 operation. The data may not actually be delivered in a single system
3726 call: it depends on the abilities of the transport being used.
3738 call: it depends on the abilities of the transport being used.
3727
3739
3728 Each line in the block is de-indented and concatenated. Then, that
3740 Each line in the block is de-indented and concatenated. Then, that
3729 value is evaluated as a Python b'' literal. This allows the use of
3741 value is evaluated as a Python b'' literal. This allows the use of
3730 backslash escaping, etc.
3742 backslash escaping, etc.
3731
3743
3732 raw+
3744 raw+
3733 ----
3745 ----
3734
3746
3735 Behaves like ``raw`` except flushes output afterwards.
3747 Behaves like ``raw`` except flushes output afterwards.
3736
3748
3737 command <X>
3749 command <X>
3738 -----------
3750 -----------
3739
3751
3740 Send a request to run a named command, whose name follows the ``command``
3752 Send a request to run a named command, whose name follows the ``command``
3741 string.
3753 string.
3742
3754
3743 Arguments to the command are defined as lines in this block. The format of
3755 Arguments to the command are defined as lines in this block. The format of
3744 each line is ``<key> <value>``. e.g.::
3756 each line is ``<key> <value>``. e.g.::
3745
3757
3746 command listkeys
3758 command listkeys
3747 namespace bookmarks
3759 namespace bookmarks
3748
3760
3749 If the value begins with ``eval:``, it will be interpreted as a Python
3761 If the value begins with ``eval:``, it will be interpreted as a Python
3750 literal expression. Otherwise values are interpreted as Python b'' literals.
3762 literal expression. Otherwise values are interpreted as Python b'' literals.
3751 This allows sending complex types and encoding special byte sequences via
3763 This allows sending complex types and encoding special byte sequences via
3752 backslash escaping.
3764 backslash escaping.
3753
3765
3754 The following arguments have special meaning:
3766 The following arguments have special meaning:
3755
3767
3756 ``PUSHFILE``
3768 ``PUSHFILE``
3757 When defined, the *push* mechanism of the peer will be used instead
3769 When defined, the *push* mechanism of the peer will be used instead
3758 of the static request-response mechanism and the content of the
3770 of the static request-response mechanism and the content of the
3759 file specified in the value of this argument will be sent as the
3771 file specified in the value of this argument will be sent as the
3760 command payload.
3772 command payload.
3761
3773
3762 This can be used to submit a local bundle file to the remote.
3774 This can be used to submit a local bundle file to the remote.
3763
3775
3764 batchbegin
3776 batchbegin
3765 ----------
3777 ----------
3766
3778
3767 Instruct the peer to begin a batched send.
3779 Instruct the peer to begin a batched send.
3768
3780
3769 All ``command`` blocks are queued for execution until the next
3781 All ``command`` blocks are queued for execution until the next
3770 ``batchsubmit`` block.
3782 ``batchsubmit`` block.
3771
3783
3772 batchsubmit
3784 batchsubmit
3773 -----------
3785 -----------
3774
3786
3775 Submit previously queued ``command`` blocks as a batch request.
3787 Submit previously queued ``command`` blocks as a batch request.
3776
3788
3777 This action MUST be paired with a ``batchbegin`` action.
3789 This action MUST be paired with a ``batchbegin`` action.
3778
3790
3779 httprequest <method> <path>
3791 httprequest <method> <path>
3780 ---------------------------
3792 ---------------------------
3781
3793
3782 (HTTP peer only)
3794 (HTTP peer only)
3783
3795
3784 Send an HTTP request to the peer.
3796 Send an HTTP request to the peer.
3785
3797
3786 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3798 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3787
3799
3788 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3800 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3789 headers to add to the request. e.g. ``Accept: foo``.
3801 headers to add to the request. e.g. ``Accept: foo``.
3790
3802
3791 The following arguments are special:
3803 The following arguments are special:
3792
3804
3793 ``BODYFILE``
3805 ``BODYFILE``
3794 The content of the file defined as the value to this argument will be
3806 The content of the file defined as the value to this argument will be
3795 transferred verbatim as the HTTP request body.
3807 transferred verbatim as the HTTP request body.
3796
3808
3797 ``frame <type> <flags> <payload>``
3809 ``frame <type> <flags> <payload>``
3798 Send a unified protocol frame as part of the request body.
3810 Send a unified protocol frame as part of the request body.
3799
3811
3800 All frames will be collected and sent as the body to the HTTP
3812 All frames will be collected and sent as the body to the HTTP
3801 request.
3813 request.
3802
3814
3803 close
3815 close
3804 -----
3816 -----
3805
3817
3806 Close the connection to the server.
3818 Close the connection to the server.
3807
3819
3808 flush
3820 flush
3809 -----
3821 -----
3810
3822
3811 Flush data written to the server.
3823 Flush data written to the server.
3812
3824
3813 readavailable
3825 readavailable
3814 -------------
3826 -------------
3815
3827
3816 Close the write end of the connection and read all available data from
3828 Close the write end of the connection and read all available data from
3817 the server.
3829 the server.
3818
3830
3819 If the connection to the server encompasses multiple pipes, we poll both
3831 If the connection to the server encompasses multiple pipes, we poll both
3820 pipes and read available data.
3832 pipes and read available data.
3821
3833
3822 readline
3834 readline
3823 --------
3835 --------
3824
3836
3825 Read a line of output from the server. If there are multiple output
3837 Read a line of output from the server. If there are multiple output
3826 pipes, reads only the main pipe.
3838 pipes, reads only the main pipe.
3827
3839
3828 ereadline
3840 ereadline
3829 ---------
3841 ---------
3830
3842
3831 Like ``readline``, but read from the stderr pipe, if available.
3843 Like ``readline``, but read from the stderr pipe, if available.
3832
3844
3833 read <X>
3845 read <X>
3834 --------
3846 --------
3835
3847
3836 ``read()`` <X> bytes from the server's main output pipe.
3848 ``read()`` <X> bytes from the server's main output pipe.
3837
3849
3838 eread <X>
3850 eread <X>
3839 ---------
3851 ---------
3840
3852
3841 ``read()`` <X> bytes from the server's stderr pipe, if available.
3853 ``read()`` <X> bytes from the server's stderr pipe, if available.
3842
3854
3843 Specifying Unified Frame-Based Protocol Frames
3855 Specifying Unified Frame-Based Protocol Frames
3844 ----------------------------------------------
3856 ----------------------------------------------
3845
3857
3846 It is possible to emit a *Unified Frame-Based Protocol* by using special
3858 It is possible to emit a *Unified Frame-Based Protocol* by using special
3847 syntax.
3859 syntax.
3848
3860
3849 A frame is composed of a type, flags, and a payload. These can be parsed
3861 A frame is composed of a type, flags, and a payload. These can be parsed
3850 from a string of the form:
3862 from a string of the form:
3851
3863
3852 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3864 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3853
3865
3854 ``request-id`` and ``stream-id`` are integers defining the request and
3866 ``request-id`` and ``stream-id`` are integers defining the request and
3855 stream identifiers.
3867 stream identifiers.
3856
3868
3857 ``type`` can be an integer value for the frame type or the string name
3869 ``type`` can be an integer value for the frame type or the string name
3858 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3870 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3859 ``command-name``.
3871 ``command-name``.
3860
3872
3861 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3873 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3862 components. Each component (and there can be just one) can be an integer
3874 components. Each component (and there can be just one) can be an integer
3863 or a flag name for stream flags or frame flags, respectively. Values are
3875 or a flag name for stream flags or frame flags, respectively. Values are
3864 resolved to integers and then bitwise OR'd together.
3876 resolved to integers and then bitwise OR'd together.
3865
3877
3866 ``payload`` represents the raw frame payload. If it begins with
3878 ``payload`` represents the raw frame payload. If it begins with
3867 ``cbor:``, the following string is evaluated as Python code and the
3879 ``cbor:``, the following string is evaluated as Python code and the
3868 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3880 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3869 as a Python byte string literal.
3881 as a Python byte string literal.
3870 """
3882 """
3871 opts = pycompat.byteskwargs(opts)
3883 opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {'logdata': True, 'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

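                # Values prefixed with ``eval:`` are interpreted as Python
                # literals; anything else is unescaped into a byte string.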
                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

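            # Raw frames, if any were given, become the request body and
            # take precedence over a BODYFILE body.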
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

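            # Decode and pretty-print CBOR response bodies so their structure
            # is readable in the command's output.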
            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

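            # Close stdin to signal EOF to the server, then drain whatever
            # remains on both output pipes.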
            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()