debugsidedata: small doc improvement...
author: marmoute
changeset: r43406:ba5b062a (branch: default)
@@ -1,4263 +1,4265 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import deltas as deltautil

release = lockmod.release

command = registrar.command()


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string-valued elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
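    # Illustrative example (hypothetical invocation, not from the original
    # source): in an empty repository,
    #     hg debugbuilddag '+3:base @stable *base +2'
    # would create three linear commits, tag the last one "base", switch
    # subsequent commits to the named branch "stable", and then build a
    # three-commit fork starting from "base" (the fork node plus a run of two).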

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        chunkdata = gen.changelogheader()
        showchunks(b"changelog")
        chunkdata = gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b' %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort labels with a '_' after the others, to group the '_background' entries.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
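    # Illustrative examples (hypothetical invocations, not from the original
    # source): "hg debugdag -t -b" emits the current repository's changelog
    # DAG using tags as labels and annotating branch changes, while
    # "hg debugdag .hg/store/00changelog.i 2 5" (run from the repo root)
    # reads that revlog index directly and labels revisions 2 and 5 as r2
    # and r5 in the output.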
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
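    # Illustrative example: "hg debugdate '2006-02-01 13:00:30'" prints the
    # parsed value both as Mercurial's internal "<unixtime> <tzoffset>" pair
    # and as a standard date string, as the two writes below show.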
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
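    # Illustrative example: the keywords above can be combined with the
    # formatter, e.g. something like
    #     hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
    # prints one line per manifest revision with its chain id and length.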
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
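        # The index entry fields used below are: e[1] = compressed size,
        # e[2] = uncompressed size, e[3] = delta base revision, and
        # e[5]/e[6] = first/second parent revisions.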
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()


@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))


@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
904 ]
904 ]
905 + cmdutil.remoteopts,
905 + cmdutil.remoteopts,
906 _(b'[--rev REV] [OTHER]'),
906 _(b'[--rev REV] [OTHER]'),
907 )
907 )
908 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
908 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
909 """runs the changeset discovery protocol in isolation"""
909 """runs the changeset discovery protocol in isolation"""
910 opts = pycompat.byteskwargs(opts)
910 opts = pycompat.byteskwargs(opts)
911 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
911 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
912 remote = hg.peer(repo, opts, remoteurl)
912 remote = hg.peer(repo, opts, remoteurl)
913 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
913 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
914
914
915 # make sure tests are repeatable
915 # make sure tests are repeatable
916 random.seed(int(opts[b'seed']))
916 random.seed(int(opts[b'seed']))
917
917
918 if opts.get(b'old'):
918 if opts.get(b'old'):
919
919
920 def doit(pushedrevs, remoteheads, remote=remote):
920 def doit(pushedrevs, remoteheads, remote=remote):
921 if not util.safehasattr(remote, b'branches'):
921 if not util.safehasattr(remote, b'branches'):
922 # enable in-client legacy support
922 # enable in-client legacy support
923 remote = localrepo.locallegacypeer(remote.local())
923 remote = localrepo.locallegacypeer(remote.local())
924 common, _in, hds = treediscovery.findcommonincoming(
924 common, _in, hds = treediscovery.findcommonincoming(
925 repo, remote, force=True
925 repo, remote, force=True
926 )
926 )
927 common = set(common)
927 common = set(common)
928 if not opts.get(b'nonheads'):
928 if not opts.get(b'nonheads'):
929 ui.writenoi18n(
929 ui.writenoi18n(
930 b"unpruned common: %s\n"
930 b"unpruned common: %s\n"
931 % b" ".join(sorted(short(n) for n in common))
931 % b" ".join(sorted(short(n) for n in common))
932 )
932 )
933
933
934 clnode = repo.changelog.node
934 clnode = repo.changelog.node
935 common = repo.revs(b'heads(::%ln)', common)
935 common = repo.revs(b'heads(::%ln)', common)
936 common = {clnode(r) for r in common}
936 common = {clnode(r) for r in common}
937 return common, hds
937 return common, hds
938
938
939 else:
939 else:
940
940
941 def doit(pushedrevs, remoteheads, remote=remote):
941 def doit(pushedrevs, remoteheads, remote=remote):
942 nodes = None
942 nodes = None
943 if pushedrevs:
943 if pushedrevs:
944 revs = scmutil.revrange(repo, pushedrevs)
944 revs = scmutil.revrange(repo, pushedrevs)
945 nodes = [repo[r].node() for r in revs]
945 nodes = [repo[r].node() for r in revs]
946 common, any, hds = setdiscovery.findcommonheads(
946 common, any, hds = setdiscovery.findcommonheads(
947 ui, repo, remote, ancestorsof=nodes
947 ui, repo, remote, ancestorsof=nodes
948 )
948 )
949 return common, hds
949 return common, hds
950
950
951 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
951 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
952 localrevs = opts[b'rev']
952 localrevs = opts[b'rev']
953 with util.timedcm(b'debug-discovery') as t:
953 with util.timedcm(b'debug-discovery') as t:
954 common, hds = doit(localrevs, remoterevs)
954 common, hds = doit(localrevs, remoterevs)
955
955
956 # compute all statistics
956 # compute all statistics
957 common = set(common)
957 common = set(common)
958 rheads = set(hds)
958 rheads = set(hds)
959 lheads = set(repo.heads())
959 lheads = set(repo.heads())
960
960
961 data = {}
961 data = {}
962 data[b'elapsed'] = t.elapsed
962 data[b'elapsed'] = t.elapsed
963 data[b'nb-common'] = len(common)
963 data[b'nb-common'] = len(common)
964 data[b'nb-common-local'] = len(common & lheads)
964 data[b'nb-common-local'] = len(common & lheads)
965 data[b'nb-common-remote'] = len(common & rheads)
965 data[b'nb-common-remote'] = len(common & rheads)
966 data[b'nb-common-both'] = len(common & rheads & lheads)
966 data[b'nb-common-both'] = len(common & rheads & lheads)
967 data[b'nb-local'] = len(lheads)
967 data[b'nb-local'] = len(lheads)
968 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
968 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
969 data[b'nb-remote'] = len(rheads)
969 data[b'nb-remote'] = len(rheads)
970 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
970 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
971 data[b'nb-revs'] = len(repo.revs(b'all()'))
971 data[b'nb-revs'] = len(repo.revs(b'all()'))
972 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
972 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
973 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
973 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
974
974
975 # display discovery summary
975 # display discovery summary
976 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
976 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
977 ui.writenoi18n(b"heads summary:\n")
977 ui.writenoi18n(b"heads summary:\n")
978 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
978 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
979 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
979 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
980 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
980 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
981 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
981 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
982 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
982 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
983 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
983 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
984 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
984 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
985 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
985 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
986 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
986 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
987 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
987 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
988 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
988 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
989 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
989 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
990 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
990 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
991
991
992 if ui.verbose:
992 if ui.verbose:
993 ui.writenoi18n(
993 ui.writenoi18n(
994 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
994 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
995 )
995 )
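# Illustrative note (not part of upstream Mercurial): the discovery summary
# above is produced by '%'-formatting bytes templates against the ``data``
# mapping, so every key named in a template must be present, e.g.:
#
#   stats = {b'nb-common': 3, b'nb-local': 5}
#   b"  total common heads: %(nb-common)9d\n" % stats
#   # -> b'  total common heads:         3\n'
#
# The ``9d`` width specifier is what keeps the numeric columns aligned.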
996
996
997
997
998 _chunksize = 4 << 10
998 _chunksize = 4 << 10
999
999
1000
1000
1001 @command(
1001 @command(
1002 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1002 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1003 )
1003 )
1004 def debugdownload(ui, repo, url, output=None, **opts):
1004 def debugdownload(ui, repo, url, output=None, **opts):
1005 """download a resource using Mercurial logic and config
1005 """download a resource using Mercurial logic and config
1006 """
1006 """
1007 fh = urlmod.open(ui, url, output)
1007 fh = urlmod.open(ui, url, output)
1008
1008
1009 dest = ui
1009 dest = ui
1010 if output:
1010 if output:
1011 dest = open(output, b"wb", _chunksize)
1011 dest = open(output, b"wb", _chunksize)
1012 try:
1012 try:
1013 data = fh.read(_chunksize)
1013 data = fh.read(_chunksize)
1014 while data:
1014 while data:
1015 dest.write(data)
1015 dest.write(data)
1016 data = fh.read(_chunksize)
1016 data = fh.read(_chunksize)
1017 finally:
1017 finally:
1018 if output:
1018 if output:
1019 dest.close()
1019 dest.close()
1020
1020
1021
1021
1022 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1022 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1023 def debugextensions(ui, repo, **opts):
1023 def debugextensions(ui, repo, **opts):
1024 '''show information about active extensions'''
1024 '''show information about active extensions'''
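    # Example usage (illustrative): ``-v`` adds location/bundled/tested-with
    # details, and the formatter options allow machine-readable output:
    #
    #   $ hg debugextensions -v
    #   $ hg debugextensions -T json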
1025 opts = pycompat.byteskwargs(opts)
1025 opts = pycompat.byteskwargs(opts)
1026 exts = extensions.extensions(ui)
1026 exts = extensions.extensions(ui)
1027 hgver = util.version()
1027 hgver = util.version()
1028 fm = ui.formatter(b'debugextensions', opts)
1028 fm = ui.formatter(b'debugextensions', opts)
1029 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1029 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1030 isinternal = extensions.ismoduleinternal(extmod)
1030 isinternal = extensions.ismoduleinternal(extmod)
1031 extsource = pycompat.fsencode(extmod.__file__)
1031 extsource = pycompat.fsencode(extmod.__file__)
1032 if isinternal:
1032 if isinternal:
1033 exttestedwith = [] # never expose magic string to users
1033 exttestedwith = [] # never expose magic string to users
1034 else:
1034 else:
1035 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1035 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1036 extbuglink = getattr(extmod, 'buglink', None)
1036 extbuglink = getattr(extmod, 'buglink', None)
1037
1037
1038 fm.startitem()
1038 fm.startitem()
1039
1039
1040 if ui.quiet or ui.verbose:
1040 if ui.quiet or ui.verbose:
1041 fm.write(b'name', b'%s\n', extname)
1041 fm.write(b'name', b'%s\n', extname)
1042 else:
1042 else:
1043 fm.write(b'name', b'%s', extname)
1043 fm.write(b'name', b'%s', extname)
1044 if isinternal or hgver in exttestedwith:
1044 if isinternal or hgver in exttestedwith:
1045 fm.plain(b'\n')
1045 fm.plain(b'\n')
1046 elif not exttestedwith:
1046 elif not exttestedwith:
1047 fm.plain(_(b' (untested!)\n'))
1047 fm.plain(_(b' (untested!)\n'))
1048 else:
1048 else:
1049 lasttestedversion = exttestedwith[-1]
1049 lasttestedversion = exttestedwith[-1]
1050 fm.plain(b' (%s!)\n' % lasttestedversion)
1050 fm.plain(b' (%s!)\n' % lasttestedversion)
1051
1051
1052 fm.condwrite(
1052 fm.condwrite(
1053 ui.verbose and extsource,
1053 ui.verbose and extsource,
1054 b'source',
1054 b'source',
1055 _(b' location: %s\n'),
1055 _(b' location: %s\n'),
1056 extsource or b"",
1056 extsource or b"",
1057 )
1057 )
1058
1058
1059 if ui.verbose:
1059 if ui.verbose:
1060 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1060 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1061 fm.data(bundled=isinternal)
1061 fm.data(bundled=isinternal)
1062
1062
1063 fm.condwrite(
1063 fm.condwrite(
1064 ui.verbose and exttestedwith,
1064 ui.verbose and exttestedwith,
1065 b'testedwith',
1065 b'testedwith',
1066 _(b' tested with: %s\n'),
1066 _(b' tested with: %s\n'),
1067 fm.formatlist(exttestedwith, name=b'ver'),
1067 fm.formatlist(exttestedwith, name=b'ver'),
1068 )
1068 )
1069
1069
1070 fm.condwrite(
1070 fm.condwrite(
1071 ui.verbose and extbuglink,
1071 ui.verbose and extbuglink,
1072 b'buglink',
1072 b'buglink',
1073 _(b' bug reporting: %s\n'),
1073 _(b' bug reporting: %s\n'),
1074 extbuglink or b"",
1074 extbuglink or b"",
1075 )
1075 )
1076
1076
1077 fm.end()
1077 fm.end()
1078
1078
1079
1079
1080 @command(
1080 @command(
1081 b'debugfileset',
1081 b'debugfileset',
1082 [
1082 [
1083 (
1083 (
1084 b'r',
1084 b'r',
1085 b'rev',
1085 b'rev',
1086 b'',
1086 b'',
1087 _(b'apply the filespec on this revision'),
1087 _(b'apply the filespec on this revision'),
1088 _(b'REV'),
1088 _(b'REV'),
1089 ),
1089 ),
1090 (
1090 (
1091 b'',
1091 b'',
1092 b'all-files',
1092 b'all-files',
1093 False,
1093 False,
1094 _(b'test files from all revisions and working directory'),
1094 _(b'test files from all revisions and working directory'),
1095 ),
1095 ),
1096 (
1096 (
1097 b's',
1097 b's',
1098 b'show-matcher',
1098 b'show-matcher',
1099 None,
1099 None,
1100 _(b'print internal representation of matcher'),
1100 _(b'print internal representation of matcher'),
1101 ),
1101 ),
1102 (
1102 (
1103 b'p',
1103 b'p',
1104 b'show-stage',
1104 b'show-stage',
1105 [],
1105 [],
1106 _(b'print parsed tree at the given stage'),
1106 _(b'print parsed tree at the given stage'),
1107 _(b'NAME'),
1107 _(b'NAME'),
1108 ),
1108 ),
1109 ],
1109 ],
1110 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1110 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1111 )
1111 )
1112 def debugfileset(ui, repo, expr, **opts):
1112 def debugfileset(ui, repo, expr, **opts):
1113 '''parse and apply a fileset specification'''
1113 '''parse and apply a fileset specification'''
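    # Example usage (illustrative): show each rewrite stage of a fileset and
    # the files it matches:
    #
    #   $ hg debugfileset --show-stage all 'added() and grep("TODO")'
    #
    # Valid stage names are the ones declared in ``stages`` below
    # (parsed, analyzed, optimized) plus the special value ``all``.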
1114 from . import fileset
1114 from . import fileset
1115
1115
1116 fileset.symbols # force import of fileset so we have predicates to optimize
1116 fileset.symbols # force import of fileset so we have predicates to optimize
1117 opts = pycompat.byteskwargs(opts)
1117 opts = pycompat.byteskwargs(opts)
1118 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1118 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1119
1119
1120 stages = [
1120 stages = [
1121 (b'parsed', pycompat.identity),
1121 (b'parsed', pycompat.identity),
1122 (b'analyzed', filesetlang.analyze),
1122 (b'analyzed', filesetlang.analyze),
1123 (b'optimized', filesetlang.optimize),
1123 (b'optimized', filesetlang.optimize),
1124 ]
1124 ]
1125 stagenames = set(n for n, f in stages)
1125 stagenames = set(n for n, f in stages)
1126
1126
1127 showalways = set()
1127 showalways = set()
1128 if ui.verbose and not opts[b'show_stage']:
1128 if ui.verbose and not opts[b'show_stage']:
1129 # show parsed tree by --verbose (deprecated)
1129 # show parsed tree by --verbose (deprecated)
1130 showalways.add(b'parsed')
1130 showalways.add(b'parsed')
1131 if opts[b'show_stage'] == [b'all']:
1131 if opts[b'show_stage'] == [b'all']:
1132 showalways.update(stagenames)
1132 showalways.update(stagenames)
1133 else:
1133 else:
1134 for n in opts[b'show_stage']:
1134 for n in opts[b'show_stage']:
1135 if n not in stagenames:
1135 if n not in stagenames:
1136 raise error.Abort(_(b'invalid stage name: %s') % n)
1136 raise error.Abort(_(b'invalid stage name: %s') % n)
1137 showalways.update(opts[b'show_stage'])
1137 showalways.update(opts[b'show_stage'])
1138
1138
1139 tree = filesetlang.parse(expr)
1139 tree = filesetlang.parse(expr)
1140 for n, f in stages:
1140 for n, f in stages:
1141 tree = f(tree)
1141 tree = f(tree)
1142 if n in showalways:
1142 if n in showalways:
1143 if opts[b'show_stage'] or n != b'parsed':
1143 if opts[b'show_stage'] or n != b'parsed':
1144 ui.write(b"* %s:\n" % n)
1144 ui.write(b"* %s:\n" % n)
1145 ui.write(filesetlang.prettyformat(tree), b"\n")
1145 ui.write(filesetlang.prettyformat(tree), b"\n")
1146
1146
1147 files = set()
1147 files = set()
1148 if opts[b'all_files']:
1148 if opts[b'all_files']:
1149 for r in repo:
1149 for r in repo:
1150 c = repo[r]
1150 c = repo[r]
1151 files.update(c.files())
1151 files.update(c.files())
1152 files.update(c.substate)
1152 files.update(c.substate)
1153 if opts[b'all_files'] or ctx.rev() is None:
1153 if opts[b'all_files'] or ctx.rev() is None:
1154 wctx = repo[None]
1154 wctx = repo[None]
1155 files.update(
1155 files.update(
1156 repo.dirstate.walk(
1156 repo.dirstate.walk(
1157 scmutil.matchall(repo),
1157 scmutil.matchall(repo),
1158 subrepos=list(wctx.substate),
1158 subrepos=list(wctx.substate),
1159 unknown=True,
1159 unknown=True,
1160 ignored=True,
1160 ignored=True,
1161 )
1161 )
1162 )
1162 )
1163 files.update(wctx.substate)
1163 files.update(wctx.substate)
1164 else:
1164 else:
1165 files.update(ctx.files())
1165 files.update(ctx.files())
1166 files.update(ctx.substate)
1166 files.update(ctx.substate)
1167
1167
1168 m = ctx.matchfileset(expr)
1168 m = ctx.matchfileset(expr)
1169 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1169 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1170 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1170 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1171 for f in sorted(files):
1171 for f in sorted(files):
1172 if not m(f):
1172 if not m(f):
1173 continue
1173 continue
1174 ui.write(b"%s\n" % f)
1174 ui.write(b"%s\n" % f)
1175
1175
1176
1176
1177 @command(b'debugformat', [] + cmdutil.formatteropts)
1177 @command(b'debugformat', [] + cmdutil.formatteropts)
1178 def debugformat(ui, repo, **opts):
1178 def debugformat(ui, repo, **opts):
1179 """display format information about the current repository
1179 """display format information about the current repository
1180
1180
1181 Use --verbose to get extra information about the current config value and
1181 Use --verbose to get extra information about the current config value and
1182 the Mercurial default."""
1182 the Mercurial default."""
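    # Example usage (illustrative): compare the repository's format variants
    # with the configured and Mercurial-default values:
    #
    #   $ hg debugformat --verbose
    #
    # Plain output prints one "variant / repo / config / default" row per
    # variant; ``-T json`` gives the same data in structured form.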
1183 opts = pycompat.byteskwargs(opts)
1183 opts = pycompat.byteskwargs(opts)
1184 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1184 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1185 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1185 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1186
1186
1187 def makeformatname(name):
1187 def makeformatname(name):
1188 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1188 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1189
1189
1190 fm = ui.formatter(b'debugformat', opts)
1190 fm = ui.formatter(b'debugformat', opts)
1191 if fm.isplain():
1191 if fm.isplain():
1192
1192
1193 def formatvalue(value):
1193 def formatvalue(value):
1194 if util.safehasattr(value, b'startswith'):
1194 if util.safehasattr(value, b'startswith'):
1195 return value
1195 return value
1196 if value:
1196 if value:
1197 return b'yes'
1197 return b'yes'
1198 else:
1198 else:
1199 return b'no'
1199 return b'no'
1200
1200
1201 else:
1201 else:
1202 formatvalue = pycompat.identity
1202 formatvalue = pycompat.identity
1203
1203
1204 fm.plain(b'format-variant')
1204 fm.plain(b'format-variant')
1205 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1205 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1206 fm.plain(b' repo')
1206 fm.plain(b' repo')
1207 if ui.verbose:
1207 if ui.verbose:
1208 fm.plain(b' config default')
1208 fm.plain(b' config default')
1209 fm.plain(b'\n')
1209 fm.plain(b'\n')
1210 for fv in upgrade.allformatvariant:
1210 for fv in upgrade.allformatvariant:
1211 fm.startitem()
1211 fm.startitem()
1212 repovalue = fv.fromrepo(repo)
1212 repovalue = fv.fromrepo(repo)
1213 configvalue = fv.fromconfig(repo)
1213 configvalue = fv.fromconfig(repo)
1214
1214
1215 if repovalue != configvalue:
1215 if repovalue != configvalue:
1216 namelabel = b'formatvariant.name.mismatchconfig'
1216 namelabel = b'formatvariant.name.mismatchconfig'
1217 repolabel = b'formatvariant.repo.mismatchconfig'
1217 repolabel = b'formatvariant.repo.mismatchconfig'
1218 elif repovalue != fv.default:
1218 elif repovalue != fv.default:
1219 namelabel = b'formatvariant.name.mismatchdefault'
1219 namelabel = b'formatvariant.name.mismatchdefault'
1220 repolabel = b'formatvariant.repo.mismatchdefault'
1220 repolabel = b'formatvariant.repo.mismatchdefault'
1221 else:
1221 else:
1222 namelabel = b'formatvariant.name.uptodate'
1222 namelabel = b'formatvariant.name.uptodate'
1223 repolabel = b'formatvariant.repo.uptodate'
1223 repolabel = b'formatvariant.repo.uptodate'
1224
1224
1225 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1225 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1226 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1226 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1227 if fv.default != configvalue:
1227 if fv.default != configvalue:
1228 configlabel = b'formatvariant.config.special'
1228 configlabel = b'formatvariant.config.special'
1229 else:
1229 else:
1230 configlabel = b'formatvariant.config.default'
1230 configlabel = b'formatvariant.config.default'
1231 fm.condwrite(
1231 fm.condwrite(
1232 ui.verbose,
1232 ui.verbose,
1233 b'config',
1233 b'config',
1234 b' %6s',
1234 b' %6s',
1235 formatvalue(configvalue),
1235 formatvalue(configvalue),
1236 label=configlabel,
1236 label=configlabel,
1237 )
1237 )
1238 fm.condwrite(
1238 fm.condwrite(
1239 ui.verbose,
1239 ui.verbose,
1240 b'default',
1240 b'default',
1241 b' %7s',
1241 b' %7s',
1242 formatvalue(fv.default),
1242 formatvalue(fv.default),
1243 label=b'formatvariant.default',
1243 label=b'formatvariant.default',
1244 )
1244 )
1245 fm.plain(b'\n')
1245 fm.plain(b'\n')
1246 fm.end()
1246 fm.end()
1247
1247
1248
1248
1249 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1249 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1250 def debugfsinfo(ui, path=b"."):
1250 def debugfsinfo(ui, path=b"."):
1251 """show information detected about current filesystem"""
1251 """show information detected about current filesystem"""
1252 ui.writenoi18n(b'path: %s\n' % path)
1252 ui.writenoi18n(b'path: %s\n' % path)
1253 ui.writenoi18n(
1253 ui.writenoi18n(
1254 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1254 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1255 )
1255 )
1256 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1256 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1257 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1257 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1258 ui.writenoi18n(
1258 ui.writenoi18n(
1259 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1259 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1260 )
1260 )
1261 ui.writenoi18n(
1261 ui.writenoi18n(
1262 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1262 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1263 )
1263 )
1264 casesensitive = b'(unknown)'
1264 casesensitive = b'(unknown)'
1265 try:
1265 try:
1266 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1266 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1267 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1267 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1268 except OSError:
1268 except OSError:
1269 pass
1269 pass
1270 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1270 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1271
1271
1272
1272
1273 @command(
1273 @command(
1274 b'debuggetbundle',
1274 b'debuggetbundle',
1275 [
1275 [
1276 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1276 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1277 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1277 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1278 (
1278 (
1279 b't',
1279 b't',
1280 b'type',
1280 b'type',
1281 b'bzip2',
1281 b'bzip2',
1282 _(b'bundle compression type to use'),
1282 _(b'bundle compression type to use'),
1283 _(b'TYPE'),
1283 _(b'TYPE'),
1284 ),
1284 ),
1285 ],
1285 ],
1286 _(b'REPO FILE [-H|-C ID]...'),
1286 _(b'REPO FILE [-H|-C ID]...'),
1287 norepo=True,
1287 norepo=True,
1288 )
1288 )
1289 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1289 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1290 """retrieves a bundle from a repo
1290 """retrieves a bundle from a repo
1291
1291
1292 Every ID must be a full-length hex node id string. Saves the bundle to the
1292 Every ID must be a full-length hex node id string. Saves the bundle to the
1293 given file.
1293 given file.
1294 """
1294 """
1295 opts = pycompat.byteskwargs(opts)
1295 opts = pycompat.byteskwargs(opts)
1296 repo = hg.peer(ui, opts, repopath)
1296 repo = hg.peer(ui, opts, repopath)
1297 if not repo.capable(b'getbundle'):
1297 if not repo.capable(b'getbundle'):
1298 raise error.Abort(b"getbundle() not supported by target repository")
1298 raise error.Abort(b"getbundle() not supported by target repository")
1299 args = {}
1299 args = {}
1300 if common:
1300 if common:
1301 args[r'common'] = [bin(s) for s in common]
1301 args[r'common'] = [bin(s) for s in common]
1302 if head:
1302 if head:
1303 args[r'heads'] = [bin(s) for s in head]
1303 args[r'heads'] = [bin(s) for s in head]
1304 # TODO: get desired bundlecaps from command line.
1304 # TODO: get desired bundlecaps from command line.
1305 args[r'bundlecaps'] = None
1305 args[r'bundlecaps'] = None
1306 bundle = repo.getbundle(b'debug', **args)
1306 bundle = repo.getbundle(b'debug', **args)
1307
1307
1308 bundletype = opts.get(b'type', b'bzip2').lower()
1308 bundletype = opts.get(b'type', b'bzip2').lower()
1309 btypes = {
1309 btypes = {
1310 b'none': b'HG10UN',
1310 b'none': b'HG10UN',
1311 b'bzip2': b'HG10BZ',
1311 b'bzip2': b'HG10BZ',
1312 b'gzip': b'HG10GZ',
1312 b'gzip': b'HG10GZ',
1313 b'bundle2': b'HG20',
1313 b'bundle2': b'HG20',
1314 }
1314 }
1315 bundletype = btypes.get(bundletype)
1315 bundletype = btypes.get(bundletype)
1316 if bundletype not in bundle2.bundletypes:
1316 if bundletype not in bundle2.bundletypes:
1317 raise error.Abort(_(b'unknown bundle type specified with --type'))
1317 raise error.Abort(_(b'unknown bundle type specified with --type'))
1318 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1318 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1319
1319
1320
1320
1321 @command(b'debugignore', [], b'[FILE]')
1321 @command(b'debugignore', [], b'[FILE]')
1322 def debugignore(ui, repo, *files, **opts):
1322 def debugignore(ui, repo, *files, **opts):
1323 """display the combined ignore pattern and information about ignored files
1323 """display the combined ignore pattern and information about ignored files
1324
1324
1325 With no argument display the combined ignore pattern.
1325 With no argument display the combined ignore pattern.
1326
1326
1327 Given space-separated file names, show whether each given file is ignored
1327 Given space-separated file names, show whether each given file is ignored
1328 and, if so, show the ignore rule (file and line number) that matched it.
1328 and, if so, show the ignore rule (file and line number) that matched it.
1329 """
1329 """
1330 ignore = repo.dirstate._ignore
1330 ignore = repo.dirstate._ignore
1331 if not files:
1331 if not files:
1332 # Show all the patterns
1332 # Show all the patterns
1333 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1333 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1334 else:
1334 else:
1335 m = scmutil.match(repo[None], pats=files)
1335 m = scmutil.match(repo[None], pats=files)
1336 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1336 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1337 for f in m.files():
1337 for f in m.files():
1338 nf = util.normpath(f)
1338 nf = util.normpath(f)
1339 ignored = None
1339 ignored = None
1340 ignoredata = None
1340 ignoredata = None
1341 if nf != b'.':
1341 if nf != b'.':
1342 if ignore(nf):
1342 if ignore(nf):
1343 ignored = nf
1343 ignored = nf
1344 ignoredata = repo.dirstate._ignorefileandline(nf)
1344 ignoredata = repo.dirstate._ignorefileandline(nf)
1345 else:
1345 else:
1346 for p in util.finddirs(nf):
1346 for p in util.finddirs(nf):
1347 if ignore(p):
1347 if ignore(p):
1348 ignored = p
1348 ignored = p
1349 ignoredata = repo.dirstate._ignorefileandline(p)
1349 ignoredata = repo.dirstate._ignorefileandline(p)
1350 break
1350 break
1351 if ignored:
1351 if ignored:
1352 if ignored == nf:
1352 if ignored == nf:
1353 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1353 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1354 else:
1354 else:
1355 ui.write(
1355 ui.write(
1356 _(
1356 _(
1357 b"%s is ignored because of "
1357 b"%s is ignored because of "
1358 b"containing directory %s\n"
1358 b"containing directory %s\n"
1359 )
1359 )
1360 % (uipathfn(f), ignored)
1360 % (uipathfn(f), ignored)
1361 )
1361 )
1362 ignorefile, lineno, line = ignoredata
1362 ignorefile, lineno, line = ignoredata
1363 ui.write(
1363 ui.write(
1364 _(b"(ignore rule in %s, line %d: '%s')\n")
1364 _(b"(ignore rule in %s, line %d: '%s')\n")
1365 % (ignorefile, lineno, line)
1365 % (ignorefile, lineno, line)
1366 )
1366 )
1367 else:
1367 else:
1368 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1368 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1369
1369
1370
1370
1371 @command(
1371 @command(
1372 b'debugindex',
1372 b'debugindex',
1373 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1373 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1374 _(b'-c|-m|FILE'),
1374 _(b'-c|-m|FILE'),
1375 )
1375 )
1376 def debugindex(ui, repo, file_=None, **opts):
1376 def debugindex(ui, repo, file_=None, **opts):
1377 """dump index data for a storage primitive"""
1377 """dump index data for a storage primitive"""
1378 opts = pycompat.byteskwargs(opts)
1378 opts = pycompat.byteskwargs(opts)
1379 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1379 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1380
1380
1381 if ui.debugflag:
1381 if ui.debugflag:
1382 shortfn = hex
1382 shortfn = hex
1383 else:
1383 else:
1384 shortfn = short
1384 shortfn = short
1385
1385
1386 idlen = 12
1386 idlen = 12
1387 for i in store:
1387 for i in store:
1388 idlen = len(shortfn(store.node(i)))
1388 idlen = len(shortfn(store.node(i)))
1389 break
1389 break
1390
1390
1391 fm = ui.formatter(b'debugindex', opts)
1391 fm = ui.formatter(b'debugindex', opts)
1392 fm.plain(
1392 fm.plain(
1393 b' rev linkrev %s %s p2\n'
1393 b' rev linkrev %s %s p2\n'
1394 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1394 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1395 )
1395 )
1396
1396
1397 for rev in store:
1397 for rev in store:
1398 node = store.node(rev)
1398 node = store.node(rev)
1399 parents = store.parents(node)
1399 parents = store.parents(node)
1400
1400
1401 fm.startitem()
1401 fm.startitem()
1402 fm.write(b'rev', b'%6d ', rev)
1402 fm.write(b'rev', b'%6d ', rev)
1403 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1403 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1404 fm.write(b'node', b'%s ', shortfn(node))
1404 fm.write(b'node', b'%s ', shortfn(node))
1405 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1405 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1406 fm.write(b'p2', b'%s', shortfn(parents[1]))
1406 fm.write(b'p2', b'%s', shortfn(parents[1]))
1407 fm.plain(b'\n')
1407 fm.plain(b'\n')
1408
1408
1409 fm.end()
1409 fm.end()
1410
1410
1411
1411
1412 @command(
1412 @command(
1413 b'debugindexdot',
1413 b'debugindexdot',
1414 cmdutil.debugrevlogopts,
1414 cmdutil.debugrevlogopts,
1415 _(b'-c|-m|FILE'),
1415 _(b'-c|-m|FILE'),
1416 optionalrepo=True,
1416 optionalrepo=True,
1417 )
1417 )
1418 def debugindexdot(ui, repo, file_=None, **opts):
1418 def debugindexdot(ui, repo, file_=None, **opts):
1419 """dump an index DAG as a graphviz dot file"""
1419 """dump an index DAG as a graphviz dot file"""
1420 opts = pycompat.byteskwargs(opts)
1420 opts = pycompat.byteskwargs(opts)
1421 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1421 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1422 ui.writenoi18n(b"digraph G {\n")
1422 ui.writenoi18n(b"digraph G {\n")
1423 for i in r:
1423 for i in r:
1424 node = r.node(i)
1424 node = r.node(i)
1425 pp = r.parents(node)
1425 pp = r.parents(node)
1426 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1426 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1427 if pp[1] != nullid:
1427 if pp[1] != nullid:
1428 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1428 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1429 ui.write(b"}\n")
1429 ui.write(b"}\n")
1430
1430
1431
1431
1432 @command(b'debugindexstats', [])
1432 @command(b'debugindexstats', [])
1433 def debugindexstats(ui, repo):
1433 def debugindexstats(ui, repo):
1434 """show stats related to the changelog index"""
1434 """show stats related to the changelog index"""
1435 repo.changelog.shortest(nullid, 1)
1435 repo.changelog.shortest(nullid, 1)
1436 index = repo.changelog.index
1436 index = repo.changelog.index
1437 if not util.safehasattr(index, b'stats'):
1437 if not util.safehasattr(index, b'stats'):
1438 raise error.Abort(_(b'debugindexstats only works with native code'))
1438 raise error.Abort(_(b'debugindexstats only works with native code'))
1439 for k, v in sorted(index.stats().items()):
1439 for k, v in sorted(index.stats().items()):
1440 ui.write(b'%s: %d\n' % (k, v))
1440 ui.write(b'%s: %d\n' % (k, v))
1441
1441
1442
1442
1443 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1443 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1444 def debuginstall(ui, **opts):
1444 def debuginstall(ui, **opts):
1445 '''test Mercurial installation
1445 '''test Mercurial installation
1446
1446
1447 Returns 0 on success.
1447 Returns 0 on success.
1448 '''
1448 '''
1449 opts = pycompat.byteskwargs(opts)
1449 opts = pycompat.byteskwargs(opts)
1450
1450
1451 problems = 0
1451 problems = 0
1452
1452
1453 fm = ui.formatter(b'debuginstall', opts)
1453 fm = ui.formatter(b'debuginstall', opts)
1454 fm.startitem()
1454 fm.startitem()
1455
1455
1456 # encoding
1456 # encoding
1457 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1457 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1458 err = None
1458 err = None
1459 try:
1459 try:
1460 codecs.lookup(pycompat.sysstr(encoding.encoding))
1460 codecs.lookup(pycompat.sysstr(encoding.encoding))
1461 except LookupError as inst:
1461 except LookupError as inst:
1462 err = stringutil.forcebytestr(inst)
1462 err = stringutil.forcebytestr(inst)
1463 problems += 1
1463 problems += 1
1464 fm.condwrite(
1464 fm.condwrite(
1465 err,
1465 err,
1466 b'encodingerror',
1466 b'encodingerror',
1467 _(b" %s\n (check that your locale is properly set)\n"),
1467 _(b" %s\n (check that your locale is properly set)\n"),
1468 err,
1468 err,
1469 )
1469 )
1470
1470
1471 # Python
1471 # Python
1472 fm.write(
1472 fm.write(
1473 b'pythonexe',
1473 b'pythonexe',
1474 _(b"checking Python executable (%s)\n"),
1474 _(b"checking Python executable (%s)\n"),
1475 pycompat.sysexecutable or _(b"unknown"),
1475 pycompat.sysexecutable or _(b"unknown"),
1476 )
1476 )
1477 fm.write(
1477 fm.write(
1478 b'pythonver',
1478 b'pythonver',
1479 _(b"checking Python version (%s)\n"),
1479 _(b"checking Python version (%s)\n"),
1480 (b"%d.%d.%d" % sys.version_info[:3]),
1480 (b"%d.%d.%d" % sys.version_info[:3]),
1481 )
1481 )
1482 fm.write(
1482 fm.write(
1483 b'pythonlib',
1483 b'pythonlib',
1484 _(b"checking Python lib (%s)...\n"),
1484 _(b"checking Python lib (%s)...\n"),
1485 os.path.dirname(pycompat.fsencode(os.__file__)),
1485 os.path.dirname(pycompat.fsencode(os.__file__)),
1486 )
1486 )
1487
1487
1488 security = set(sslutil.supportedprotocols)
1488 security = set(sslutil.supportedprotocols)
1489 if sslutil.hassni:
1489 if sslutil.hassni:
1490 security.add(b'sni')
1490 security.add(b'sni')
1491
1491
1492 fm.write(
1492 fm.write(
1493 b'pythonsecurity',
1493 b'pythonsecurity',
1494 _(b"checking Python security support (%s)\n"),
1494 _(b"checking Python security support (%s)\n"),
1495 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1495 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1496 )
1496 )
1497
1497
1498 # These are warnings, not errors. So don't increment problem count. This
1498 # These are warnings, not errors. So don't increment problem count. This
1499 # may change in the future.
1499 # may change in the future.
1500 if b'tls1.2' not in security:
1500 if b'tls1.2' not in security:
1501 fm.plain(
1501 fm.plain(
1502 _(
1502 _(
1503 b' TLS 1.2 not supported by Python install; '
1503 b' TLS 1.2 not supported by Python install; '
1504 b'network connections lack modern security\n'
1504 b'network connections lack modern security\n'
1505 )
1505 )
1506 )
1506 )
1507 if b'sni' not in security:
1507 if b'sni' not in security:
1508 fm.plain(
1508 fm.plain(
1509 _(
1509 _(
1510 b' SNI not supported by Python install; may have '
1510 b' SNI not supported by Python install; may have '
1511 b'connectivity issues with some servers\n'
1511 b'connectivity issues with some servers\n'
1512 )
1512 )
1513 )
1513 )
1514
1514
1515 # TODO print CA cert info
1515 # TODO print CA cert info
1516
1516
1517 # hg version
1517 # hg version
1518 hgver = util.version()
1518 hgver = util.version()
1519 fm.write(
1519 fm.write(
1520 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1520 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1521 )
1521 )
1522 fm.write(
1522 fm.write(
1523 b'hgverextra',
1523 b'hgverextra',
1524 _(b"checking Mercurial custom build (%s)\n"),
1524 _(b"checking Mercurial custom build (%s)\n"),
1525 b'+'.join(hgver.split(b'+')[1:]),
1525 b'+'.join(hgver.split(b'+')[1:]),
1526 )
1526 )
1527
1527
1528 # compiled modules
1528 # compiled modules
1529 fm.write(
1529 fm.write(
1530 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1530 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1531 )
1531 )
1532 fm.write(
1532 fm.write(
1533 b'hgmodules',
1533 b'hgmodules',
1534 _(b"checking installed modules (%s)...\n"),
1534 _(b"checking installed modules (%s)...\n"),
1535 os.path.dirname(pycompat.fsencode(__file__)),
1535 os.path.dirname(pycompat.fsencode(__file__)),
1536 )
1536 )
1537
1537
1538 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1538 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1539 rustext = rustandc # for now, that's the only case
1539 rustext = rustandc # for now, that's the only case
1540 cext = policy.policy in (b'c', b'allow') or rustandc
1540 cext = policy.policy in (b'c', b'allow') or rustandc
1541 nopure = cext or rustext
1541 nopure = cext or rustext
1542 if nopure:
1542 if nopure:
1543 err = None
1543 err = None
1544 try:
1544 try:
1545 if cext:
1545 if cext:
1546 from .cext import (
1546 from .cext import (
1547 base85,
1547 base85,
1548 bdiff,
1548 bdiff,
1549 mpatch,
1549 mpatch,
1550 osutil,
1550 osutil,
1551 )
1551 )
1552
1552
1553 # quiet pyflakes
1553 # quiet pyflakes
1554 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1554 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1555 if rustext:
1555 if rustext:
1556 from .rustext import (
1556 from .rustext import (
1557 ancestor,
1557 ancestor,
1558 dirstate,
1558 dirstate,
1559 )
1559 )
1560
1560
1561 dir(ancestor), dir(dirstate) # quiet pyflakes
1561 dir(ancestor), dir(dirstate) # quiet pyflakes
1562 except Exception as inst:
1562 except Exception as inst:
1563 err = stringutil.forcebytestr(inst)
1563 err = stringutil.forcebytestr(inst)
1564 problems += 1
1564 problems += 1
1565 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1565 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1566
1566
1567 compengines = util.compengines._engines.values()
1567 compengines = util.compengines._engines.values()
1568 fm.write(
1568 fm.write(
1569 b'compengines',
1569 b'compengines',
1570 _(b'checking registered compression engines (%s)\n'),
1570 _(b'checking registered compression engines (%s)\n'),
1571 fm.formatlist(
1571 fm.formatlist(
1572 sorted(e.name() for e in compengines),
1572 sorted(e.name() for e in compengines),
1573 name=b'compengine',
1573 name=b'compengine',
1574 fmt=b'%s',
1574 fmt=b'%s',
1575 sep=b', ',
1575 sep=b', ',
1576 ),
1576 ),
1577 )
1577 )
1578 fm.write(
1578 fm.write(
1579 b'compenginesavail',
1579 b'compenginesavail',
1580 _(b'checking available compression engines (%s)\n'),
1580 _(b'checking available compression engines (%s)\n'),
1581 fm.formatlist(
1581 fm.formatlist(
1582 sorted(e.name() for e in compengines if e.available()),
1582 sorted(e.name() for e in compengines if e.available()),
1583 name=b'compengine',
1583 name=b'compengine',
1584 fmt=b'%s',
1584 fmt=b'%s',
1585 sep=b', ',
1585 sep=b', ',
1586 ),
1586 ),
1587 )
1587 )
1588 wirecompengines = compression.compengines.supportedwireengines(
1588 wirecompengines = compression.compengines.supportedwireengines(
1589 compression.SERVERROLE
1589 compression.SERVERROLE
1590 )
1590 )
1591 fm.write(
1591 fm.write(
1592 b'compenginesserver',
1592 b'compenginesserver',
1593 _(
1593 _(
1594 b'checking available compression engines '
1594 b'checking available compression engines '
1595 b'for wire protocol (%s)\n'
1595 b'for wire protocol (%s)\n'
1596 ),
1596 ),
1597 fm.formatlist(
1597 fm.formatlist(
1598 [e.name() for e in wirecompengines if e.wireprotosupport()],
1598 [e.name() for e in wirecompengines if e.wireprotosupport()],
1599 name=b'compengine',
1599 name=b'compengine',
1600 fmt=b'%s',
1600 fmt=b'%s',
1601 sep=b', ',
1601 sep=b', ',
1602 ),
1602 ),
1603 )
1603 )
1604 re2 = b'missing'
1604 re2 = b'missing'
1605 if util._re2:
1605 if util._re2:
1606 re2 = b'available'
1606 re2 = b'available'
1607 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1607 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1608 fm.data(re2=bool(util._re2))
1608 fm.data(re2=bool(util._re2))
1609
1609
1610 # templates
1610 # templates
1611 p = templater.templatepaths()
1611 p = templater.templatepaths()
1612 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1612 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1613 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1613 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1614 if p:
1614 if p:
1615 m = templater.templatepath(b"map-cmdline.default")
1615 m = templater.templatepath(b"map-cmdline.default")
1616 if m:
1616 if m:
1617 # template found, check if it is working
1617 # template found, check if it is working
1618 err = None
1618 err = None
1619 try:
1619 try:
1620 templater.templater.frommapfile(m)
1620 templater.templater.frommapfile(m)
1621 except Exception as inst:
1621 except Exception as inst:
1622 err = stringutil.forcebytestr(inst)
1622 err = stringutil.forcebytestr(inst)
1623 p = None
1623 p = None
1624 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1624 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1625 else:
1625 else:
1626 p = None
1626 p = None
1627 fm.condwrite(
1627 fm.condwrite(
1628 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1628 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1629 )
1629 )
1630 fm.condwrite(
1630 fm.condwrite(
1631 not m,
1631 not m,
1632 b'defaulttemplatenotfound',
1632 b'defaulttemplatenotfound',
1633 _(b" template '%s' not found\n"),
1633 _(b" template '%s' not found\n"),
1634 b"default",
1634 b"default",
1635 )
1635 )
1636 if not p:
1636 if not p:
1637 problems += 1
1637 problems += 1
1638 fm.condwrite(
1638 fm.condwrite(
1639 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1639 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1640 )
1640 )
1641
1641
1642 # editor
1642 # editor
1643 editor = ui.geteditor()
1643 editor = ui.geteditor()
1644 editor = util.expandpath(editor)
1644 editor = util.expandpath(editor)
1645 editorbin = procutil.shellsplit(editor)[0]
1645 editorbin = procutil.shellsplit(editor)[0]
1646 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1646 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1647 cmdpath = procutil.findexe(editorbin)
1647 cmdpath = procutil.findexe(editorbin)
1648 fm.condwrite(
1648 fm.condwrite(
1649 not cmdpath and editor == b'vi',
1649 not cmdpath and editor == b'vi',
1650 b'vinotfound',
1650 b'vinotfound',
1651 _(
1651 _(
1652 b" No commit editor set and can't find %s in PATH\n"
1652 b" No commit editor set and can't find %s in PATH\n"
1653 b" (specify a commit editor in your configuration"
1653 b" (specify a commit editor in your configuration"
1654 b" file)\n"
1654 b" file)\n"
1655 ),
1655 ),
1656 not cmdpath and editor == b'vi' and editorbin,
1656 not cmdpath and editor == b'vi' and editorbin,
1657 )
1657 )
1658 fm.condwrite(
1658 fm.condwrite(
1659 not cmdpath and editor != b'vi',
1659 not cmdpath and editor != b'vi',
1660 b'editornotfound',
1660 b'editornotfound',
1661 _(
1661 _(
1662 b" Can't find editor '%s' in PATH\n"
1662 b" Can't find editor '%s' in PATH\n"
1663 b" (specify a commit editor in your configuration"
1663 b" (specify a commit editor in your configuration"
1664 b" file)\n"
1664 b" file)\n"
1665 ),
1665 ),
1666 not cmdpath and editorbin,
1666 not cmdpath and editorbin,
1667 )
1667 )
1668 if not cmdpath and editor != b'vi':
1668 if not cmdpath and editor != b'vi':
1669 problems += 1
1669 problems += 1
1670
1670
1671 # check username
1671 # check username
1672 username = None
1672 username = None
1673 err = None
1673 err = None
1674 try:
1674 try:
1675 username = ui.username()
1675 username = ui.username()
1676 except error.Abort as e:
1676 except error.Abort as e:
1677 err = stringutil.forcebytestr(e)
1677 err = stringutil.forcebytestr(e)
1678 problems += 1
1678 problems += 1
1679
1679
1680 fm.condwrite(
1680 fm.condwrite(
1681 username, b'username', _(b"checking username (%s)\n"), username
1681 username, b'username', _(b"checking username (%s)\n"), username
1682 )
1682 )
1683 fm.condwrite(
1683 fm.condwrite(
1684 err,
1684 err,
1685 b'usernameerror',
1685 b'usernameerror',
1686 _(
1686 _(
1687 b"checking username...\n %s\n"
1687 b"checking username...\n %s\n"
1688 b" (specify a username in your configuration file)\n"
1688 b" (specify a username in your configuration file)\n"
1689 ),
1689 ),
1690 err,
1690 err,
1691 )
1691 )
1692
1692
1693 for name, mod in extensions.extensions():
1693 for name, mod in extensions.extensions():
1694 handler = getattr(mod, 'debuginstall', None)
1694 handler = getattr(mod, 'debuginstall', None)
1695 if handler is not None:
1695 if handler is not None:
1696 problems += handler(ui, fm)
1696 problems += handler(ui, fm)
1697
1697
1698 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1698 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1699 if not problems:
1699 if not problems:
1700 fm.data(problems=problems)
1700 fm.data(problems=problems)
1701 fm.condwrite(
1701 fm.condwrite(
1702 problems,
1702 problems,
1703 b'problems',
1703 b'problems',
1704 _(b"%d problems detected, please check your install!\n"),
1704 _(b"%d problems detected, please check your install!\n"),
1705 problems,
1705 problems,
1706 )
1706 )
1707 fm.end()
1707 fm.end()
1708
1708
1709 return problems
1709 return problems
1710
1710
1711
1711
1712 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1712 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1713 def debugknown(ui, repopath, *ids, **opts):
1713 def debugknown(ui, repopath, *ids, **opts):
1714 """test whether node ids are known to a repo
1714 """test whether node ids are known to a repo
1715
1715
1716 Every ID must be a full-length hex node id string. Returns a list of 0s
1716 Every ID must be a full-length hex node id string. Returns a list of 0s
1717 and 1s indicating unknown/known.
1717 and 1s indicating unknown/known.
1718 """
1718 """
1719 opts = pycompat.byteskwargs(opts)
1719 opts = pycompat.byteskwargs(opts)
1720 repo = hg.peer(ui, opts, repopath)
1720 repo = hg.peer(ui, opts, repopath)
1721 if not repo.capable(b'known'):
1721 if not repo.capable(b'known'):
1722 raise error.Abort(b"known() not supported by target repository")
1722 raise error.Abort(b"known() not supported by target repository")
1723 flags = repo.known([bin(s) for s in ids])
1723 flags = repo.known([bin(s) for s in ids])
1724 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1724 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1725
1725
1726
1726
1727 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1727 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1728 def debuglabelcomplete(ui, repo, *args):
1728 def debuglabelcomplete(ui, repo, *args):
1729 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1729 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1730 debugnamecomplete(ui, repo, *args)
1730 debugnamecomplete(ui, repo, *args)
1731
1731
1732
1732
1733 @command(
1733 @command(
1734 b'debuglocks',
1734 b'debuglocks',
1735 [
1735 [
1736 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1736 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1737 (
1737 (
1738 b'W',
1738 b'W',
1739 b'force-wlock',
1739 b'force-wlock',
1740 None,
1740 None,
1741 _(b'free the working state lock (DANGEROUS)'),
1741 _(b'free the working state lock (DANGEROUS)'),
1742 ),
1742 ),
1743 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1743 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1744 (
1744 (
1745 b'S',
1745 b'S',
1746 b'set-wlock',
1746 b'set-wlock',
1747 None,
1747 None,
1748 _(b'set the working state lock until stopped'),
1748 _(b'set the working state lock until stopped'),
1749 ),
1749 ),
1750 ],
1750 ],
1751 _(b'[OPTION]...'),
1751 _(b'[OPTION]...'),
1752 )
1752 )
1753 def debuglocks(ui, repo, **opts):
1753 def debuglocks(ui, repo, **opts):
1754 """show or modify state of locks
1754 """show or modify state of locks
1755
1755
1756 By default, this command will show which locks are held. This
1756 By default, this command will show which locks are held. This
1757 includes the user and process holding the lock, the amount of time
1757 includes the user and process holding the lock, the amount of time
1758 the lock has been held, and the machine name where the process is
1758 the lock has been held, and the machine name where the process is
1759 running if it's not local.
1759 running if it's not local.
1760
1760
1761 Locks protect the integrity of Mercurial's data, so should be
1761 Locks protect the integrity of Mercurial's data, so should be
1762 treated with care. System crashes or other interruptions may cause
1762 treated with care. System crashes or other interruptions may cause
1763 locks to not be properly released, though Mercurial will usually
1763 locks to not be properly released, though Mercurial will usually
1764 detect and remove such stale locks automatically.
1764 detect and remove such stale locks automatically.
1765
1765
1766 However, detecting stale locks may not always be possible (for
1766 However, detecting stale locks may not always be possible (for
1767 instance, on a shared filesystem). Removing locks may also be
1767 instance, on a shared filesystem). Removing locks may also be
1768 blocked by filesystem permissions.
1768 blocked by filesystem permissions.
1769
1769
1770 Setting a lock will prevent other commands from changing the data.
1770 Setting a lock will prevent other commands from changing the data.
1771 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1771 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1772 The set locks are removed when the command exits.
1772 The set locks are removed when the command exits.
1773
1773
1774 Returns 0 if no locks are held.
1774 Returns 0 if no locks are held.
1775
1775
1776 """
1776 """
1777
1777
1778 if opts.get(r'force_lock'):
1778 if opts.get(r'force_lock'):
1779 repo.svfs.unlink(b'lock')
1779 repo.svfs.unlink(b'lock')
1780 if opts.get(r'force_wlock'):
1780 if opts.get(r'force_wlock'):
1781 repo.vfs.unlink(b'wlock')
1781 repo.vfs.unlink(b'wlock')
1782 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1782 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1783 return 0
1783 return 0
1784
1784
1785 locks = []
1785 locks = []
1786 try:
1786 try:
1787 if opts.get(r'set_wlock'):
1787 if opts.get(r'set_wlock'):
1788 try:
1788 try:
1789 locks.append(repo.wlock(False))
1789 locks.append(repo.wlock(False))
1790 except error.LockHeld:
1790 except error.LockHeld:
1791 raise error.Abort(_(b'wlock is already held'))
1791 raise error.Abort(_(b'wlock is already held'))
1792 if opts.get(r'set_lock'):
1792 if opts.get(r'set_lock'):
1793 try:
1793 try:
1794 locks.append(repo.lock(False))
1794 locks.append(repo.lock(False))
1795 except error.LockHeld:
1795 except error.LockHeld:
1796 raise error.Abort(_(b'lock is already held'))
1796 raise error.Abort(_(b'lock is already held'))
1797 if len(locks):
1797 if len(locks):
1798 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1798 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1799 return 0
1799 return 0
1800 finally:
1800 finally:
1801 release(*locks)
1801 release(*locks)
1802
1802
1803 now = time.time()
1803 now = time.time()
1804 held = 0
1804 held = 0
1805
1805
1806 def report(vfs, name, method):
1806 def report(vfs, name, method):
1807 # this causes stale locks to get reaped for more accurate reporting
1807 # this causes stale locks to get reaped for more accurate reporting
1808 try:
1808 try:
1809 l = method(False)
1809 l = method(False)
1810 except error.LockHeld:
1810 except error.LockHeld:
1811 l = None
1811 l = None
1812
1812
1813 if l:
1813 if l:
1814 l.release()
1814 l.release()
1815 else:
1815 else:
1816 try:
1816 try:
1817 st = vfs.lstat(name)
1817 st = vfs.lstat(name)
1818 age = now - st[stat.ST_MTIME]
1818 age = now - st[stat.ST_MTIME]
1819 user = util.username(st.st_uid)
1819 user = util.username(st.st_uid)
1820 locker = vfs.readlock(name)
1820 locker = vfs.readlock(name)
1821 if b":" in locker:
1821 if b":" in locker:
1822 host, pid = locker.split(b':')
1822 host, pid = locker.split(b':')
1823 if host == socket.gethostname():
1823 if host == socket.gethostname():
1824 locker = b'user %s, process %s' % (user or b'None', pid)
1824 locker = b'user %s, process %s' % (user or b'None', pid)
1825 else:
1825 else:
1826 locker = b'user %s, process %s, host %s' % (
1826 locker = b'user %s, process %s, host %s' % (
1827 user or b'None',
1827 user or b'None',
1828 pid,
1828 pid,
1829 host,
1829 host,
1830 )
1830 )
1831 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1831 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1832 return 1
1832 return 1
1833 except OSError as e:
1833 except OSError as e:
1834 if e.errno != errno.ENOENT:
1834 if e.errno != errno.ENOENT:
1835 raise
1835 raise
1836
1836
1837 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1837 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1838 return 0
1838 return 0
1839
1839
1840 held += report(repo.svfs, b"lock", repo.lock)
1840 held += report(repo.svfs, b"lock", repo.lock)
1841 held += report(repo.vfs, b"wlock", repo.wlock)
1841 held += report(repo.vfs, b"wlock", repo.wlock)
1842
1842
1843 return held
1843 return held
1844
1844
1845
1845
1846 @command(
1846 @command(
1847 b'debugmanifestfulltextcache',
1847 b'debugmanifestfulltextcache',
1848 [
1848 [
1849 (b'', b'clear', False, _(b'clear the cache')),
1849 (b'', b'clear', False, _(b'clear the cache')),
1850 (
1850 (
1851 b'a',
1851 b'a',
1852 b'add',
1852 b'add',
1853 [],
1853 [],
1854 _(b'add the given manifest nodes to the cache'),
1854 _(b'add the given manifest nodes to the cache'),
1855 _(b'NODE'),
1855 _(b'NODE'),
1856 ),
1856 ),
1857 ],
1857 ],
1858 b'',
1858 b'',
1859 )
1859 )
1860 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1860 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1861 """show, clear or amend the contents of the manifest fulltext cache"""
1861 """show, clear or amend the contents of the manifest fulltext cache"""
1862
1862
1863 def getcache():
1863 def getcache():
1864 r = repo.manifestlog.getstorage(b'')
1864 r = repo.manifestlog.getstorage(b'')
1865 try:
1865 try:
1866 return r._fulltextcache
1866 return r._fulltextcache
1867 except AttributeError:
1867 except AttributeError:
1868 msg = _(
1868 msg = _(
1869 b"Current revlog implementation doesn't appear to have a "
1869 b"Current revlog implementation doesn't appear to have a "
1870 b"manifest fulltext cache\n"
1870 b"manifest fulltext cache\n"
1871 )
1871 )
1872 raise error.Abort(msg)
1872 raise error.Abort(msg)
1873
1873
1874 if opts.get(r'clear'):
1874 if opts.get(r'clear'):
1875 with repo.wlock():
1875 with repo.wlock():
1876 cache = getcache()
1876 cache = getcache()
1877 cache.clear(clear_persisted_data=True)
1877 cache.clear(clear_persisted_data=True)
1878 return
1878 return
1879
1879
1880 if add:
1880 if add:
1881 with repo.wlock():
1881 with repo.wlock():
1882 m = repo.manifestlog
1882 m = repo.manifestlog
1883 store = m.getstorage(b'')
1883 store = m.getstorage(b'')
1884 for n in add:
1884 for n in add:
1885 try:
1885 try:
1886 manifest = m[store.lookup(n)]
1886 manifest = m[store.lookup(n)]
1887 except error.LookupError as e:
1887 except error.LookupError as e:
1888 raise error.Abort(e, hint=b"Check your manifest node id")
1888 raise error.Abort(e, hint=b"Check your manifest node id")
1889 manifest.read() # stores revision in cache too
1889 manifest.read() # stores revision in cache too
1890 return
1890 return
1891
1891
1892 cache = getcache()
1892 cache = getcache()
1893 if not len(cache):
1893 if not len(cache):
1894 ui.write(_(b'cache empty\n'))
1894 ui.write(_(b'cache empty\n'))
1895 else:
1895 else:
1896 ui.write(
1896 ui.write(
1897 _(
1897 _(
1898 b'cache contains %d manifest entries, in order of most to '
1898 b'cache contains %d manifest entries, in order of most to '
1899 b'least recent:\n'
1899 b'least recent:\n'
1900 )
1900 )
1901 % (len(cache),)
1901 % (len(cache),)
1902 )
1902 )
1903 totalsize = 0
1903 totalsize = 0
1904 for nodeid in cache:
1904 for nodeid in cache:
1905 # Use cache.peek to not update the LRU order
1905 # Use cache.peek to not update the LRU order
1906 data = cache.peek(nodeid)
1906 data = cache.peek(nodeid)
1907 size = len(data)
1907 size = len(data)
1908 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1908 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1909 ui.write(
1909 ui.write(
1910 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1910 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1911 )
1911 )
1912 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1912 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1913 ui.write(
1913 ui.write(
1914 _(b'total cache data size %s, on-disk %s\n')
1914 _(b'total cache data size %s, on-disk %s\n')
1915 % (util.bytecount(totalsize), util.bytecount(ondisk))
1915 % (util.bytecount(totalsize), util.bytecount(ondisk))
1916 )
1916 )
1917
1917
1918
1918
1919 @command(b'debugmergestate', [], b'')
1919 @command(b'debugmergestate', [], b'')
1920 def debugmergestate(ui, repo, *args):
1920 def debugmergestate(ui, repo, *args):
1921 """print merge state
1921 """print merge state
1922
1922
1923 Use --verbose to print out information about whether v1 or v2 merge state
1923 Use --verbose to print out information about whether v1 or v2 merge state
1924 was chosen."""
1924 was chosen."""
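    # Record types decoded below (illustrative summary): 'L' local node,
    # 'O' other node, 'm' merge driver state, 'F'/'D'/'C' per-file merge
    # records, 'f' per-file extras, 'l' merge labels; anything else is
    # reported as an unrecognized entry.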
1925
1925
1926 def _hashornull(h):
1926 def _hashornull(h):
1927 if h == nullhex:
1927 if h == nullhex:
1928 return b'null'
1928 return b'null'
1929 else:
1929 else:
1930 return h
1930 return h
1931
1931
1932 def printrecords(version):
1932 def printrecords(version):
1933 ui.writenoi18n(b'* version %d records\n' % version)
1933 ui.writenoi18n(b'* version %d records\n' % version)
1934 if version == 1:
1934 if version == 1:
1935 records = v1records
1935 records = v1records
1936 else:
1936 else:
1937 records = v2records
1937 records = v2records
1938
1938
1939 for rtype, record in records:
1939 for rtype, record in records:
1940 # pretty print some record types
1940 # pretty print some record types
1941 if rtype == b'L':
1941 if rtype == b'L':
1942 ui.writenoi18n(b'local: %s\n' % record)
1942 ui.writenoi18n(b'local: %s\n' % record)
1943 elif rtype == b'O':
1943 elif rtype == b'O':
1944 ui.writenoi18n(b'other: %s\n' % record)
1944 ui.writenoi18n(b'other: %s\n' % record)
1945 elif rtype == b'm':
1945 elif rtype == b'm':
1946 driver, mdstate = record.split(b'\0', 1)
1946 driver, mdstate = record.split(b'\0', 1)
1947 ui.writenoi18n(
1947 ui.writenoi18n(
1948 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1948 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1949 )
1949 )
1950 elif rtype in b'FDC':
1950 elif rtype in b'FDC':
1951 r = record.split(b'\0')
1951 r = record.split(b'\0')
1952 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1952 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1953 if version == 1:
1953 if version == 1:
1954 onode = b'not stored in v1 format'
1954 onode = b'not stored in v1 format'
1955 flags = r[7]
1955 flags = r[7]
1956 else:
1956 else:
1957 onode, flags = r[7:9]
1957 onode, flags = r[7:9]
1958 ui.writenoi18n(
1958 ui.writenoi18n(
1959 b'file: %s (record type "%s", state "%s", hash %s)\n'
1959 b'file: %s (record type "%s", state "%s", hash %s)\n'
1960 % (f, rtype, state, _hashornull(hash))
1960 % (f, rtype, state, _hashornull(hash))
1961 )
1961 )
1962 ui.writenoi18n(
1962 ui.writenoi18n(
1963 b' local path: %s (flags "%s")\n' % (lfile, flags)
1963 b' local path: %s (flags "%s")\n' % (lfile, flags)
1964 )
1964 )
1965 ui.writenoi18n(
1965 ui.writenoi18n(
1966 b' ancestor path: %s (node %s)\n'
1966 b' ancestor path: %s (node %s)\n'
1967 % (afile, _hashornull(anode))
1967 % (afile, _hashornull(anode))
1968 )
1968 )
1969 ui.writenoi18n(
1969 ui.writenoi18n(
1970 b' other path: %s (node %s)\n'
1970 b' other path: %s (node %s)\n'
1971 % (ofile, _hashornull(onode))
1971 % (ofile, _hashornull(onode))
1972 )
1972 )
1973 elif rtype == b'f':
1973 elif rtype == b'f':
1974 filename, rawextras = record.split(b'\0', 1)
1974 filename, rawextras = record.split(b'\0', 1)
1975 extras = rawextras.split(b'\0')
1975 extras = rawextras.split(b'\0')
1976 i = 0
1976 i = 0
1977 extrastrings = []
1977 extrastrings = []
1978 while i < len(extras):
1978 while i < len(extras):
1979 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1979 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1980 i += 2
1980 i += 2
1981
1981
1982 ui.writenoi18n(
1982 ui.writenoi18n(
1983 b'file extras: %s (%s)\n'
1983 b'file extras: %s (%s)\n'
1984 % (filename, b', '.join(extrastrings))
1984 % (filename, b', '.join(extrastrings))
1985 )
1985 )
1986 elif rtype == b'l':
1986 elif rtype == b'l':
1987 labels = record.split(b'\0', 2)
1987 labels = record.split(b'\0', 2)
1988 labels = [l for l in labels if len(l) > 0]
1988 labels = [l for l in labels if len(l) > 0]
1989 ui.writenoi18n(b'labels:\n')
1989 ui.writenoi18n(b'labels:\n')
1990 ui.write((b' local: %s\n' % labels[0]))
1990 ui.write((b' local: %s\n' % labels[0]))
1991 ui.write((b' other: %s\n' % labels[1]))
1991 ui.write((b' other: %s\n' % labels[1]))
1992 if len(labels) > 2:
1992 if len(labels) > 2:
1993 ui.write((b' base: %s\n' % labels[2]))
1993 ui.write((b' base: %s\n' % labels[2]))
1994 else:
1994 else:
1995 ui.writenoi18n(
1995 ui.writenoi18n(
1996 b'unrecognized entry: %s\t%s\n'
1996 b'unrecognized entry: %s\t%s\n'
1997 % (rtype, record.replace(b'\0', b'\t'))
1997 % (rtype, record.replace(b'\0', b'\t'))
1998 )
1998 )
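# Editorial note (not in the original source): the 'F', 'D' and 'C' records
# handled above are NUL-separated fields. A hypothetical v2 'F' record would
# decode roughly as:
#   filename \0 state \0 hash \0 local path \0 ancestor path \0
#   ancestor node \0 other path \0 other node \0 flags
# which is what the r[0:7] / r[7:9] slicing in printrecords relies on.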
1999
1999
2000 # Avoid mergestate.read() since it may raise an exception for unsupported
2000 # Avoid mergestate.read() since it may raise an exception for unsupported
2001 # merge state records. We shouldn't be doing this, but this is OK since this
2001 # merge state records. We shouldn't be doing this, but this is OK since this
2002 # command is pretty low-level.
2002 # command is pretty low-level.
2003 ms = mergemod.mergestate(repo)
2003 ms = mergemod.mergestate(repo)
2004
2004
2005 # sort so that reasonable information is on top
2005 # sort so that reasonable information is on top
2006 v1records = ms._readrecordsv1()
2006 v1records = ms._readrecordsv1()
2007 v2records = ms._readrecordsv2()
2007 v2records = ms._readrecordsv2()
2008 order = b'LOml'
2008 order = b'LOml'
2009
2009
2010 def key(r):
2010 def key(r):
2011 idx = order.find(r[0])
2011 idx = order.find(r[0])
2012 if idx == -1:
2012 if idx == -1:
2013 return (1, r[1])
2013 return (1, r[1])
2014 else:
2014 else:
2015 return (0, idx)
2015 return (0, idx)
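# Editorial note (not in the original source): the key() helper above sorts
# the well-known record types into the fixed order L, O, m, l and pushes any
# other record type to the end, ordered by its payload. For example, records
# tagged b'f', b'L' and b'l' would sort as b'L', b'l', then b'f'.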
2016
2016
2017 v1records.sort(key=key)
2017 v1records.sort(key=key)
2018 v2records.sort(key=key)
2018 v2records.sort(key=key)
2019
2019
2020 if not v1records and not v2records:
2020 if not v1records and not v2records:
2021 ui.writenoi18n(b'no merge state found\n')
2021 ui.writenoi18n(b'no merge state found\n')
2022 elif not v2records:
2022 elif not v2records:
2023 ui.notenoi18n(b'no version 2 merge state\n')
2023 ui.notenoi18n(b'no version 2 merge state\n')
2024 printrecords(1)
2024 printrecords(1)
2025 elif ms._v1v2match(v1records, v2records):
2025 elif ms._v1v2match(v1records, v2records):
2026 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2026 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2027 printrecords(2)
2027 printrecords(2)
2028 else:
2028 else:
2029 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2029 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2030 printrecords(1)
2030 printrecords(1)
2031 if ui.verbose:
2031 if ui.verbose:
2032 printrecords(2)
2032 printrecords(2)
2033
2033
2034
2034
2035 @command(b'debugnamecomplete', [], _(b'NAME...'))
2035 @command(b'debugnamecomplete', [], _(b'NAME...'))
2036 def debugnamecomplete(ui, repo, *args):
2036 def debugnamecomplete(ui, repo, *args):
2037 '''complete "names" - tags, open branch names, bookmark names'''
2037 '''complete "names" - tags, open branch names, bookmark names'''
2038
2038
2039 names = set()
2039 names = set()
2040 # since we previously only listed open branches, we will handle that
2040 # since we previously only listed open branches, we will handle that
2041 # specially (after this for loop)
2041 # specially (after this for loop)
2042 for name, ns in pycompat.iteritems(repo.names):
2042 for name, ns in pycompat.iteritems(repo.names):
2043 if name != b'branches':
2043 if name != b'branches':
2044 names.update(ns.listnames(repo))
2044 names.update(ns.listnames(repo))
2045 names.update(
2045 names.update(
2046 tag
2046 tag
2047 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2047 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2048 if not closed
2048 if not closed
2049 )
2049 )
2050 completions = set()
2050 completions = set()
2051 if not args:
2051 if not args:
2052 args = [b'']
2052 args = [b'']
2053 for a in args:
2053 for a in args:
2054 completions.update(n for n in names if n.startswith(a))
2054 completions.update(n for n in names if n.startswith(a))
2055 ui.write(b'\n'.join(sorted(completions)))
2055 ui.write(b'\n'.join(sorted(completions)))
2056 ui.write(b'\n')
2056 ui.write(b'\n')
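# Illustrative usage (assumption, not part of the original source): shell
# completion scripts can call, e.g.,
#   hg debugnamecomplete rel
# to list all tags, bookmarks and open branch names starting with "rel",
# one per line; with no argument every known name is printed.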
2057
2057
2058
2058
2059 @command(
2059 @command(
2060 b'debugobsolete',
2060 b'debugobsolete',
2061 [
2061 [
2062 (b'', b'flags', 0, _(b'markers flag')),
2062 (b'', b'flags', 0, _(b'markers flag')),
2063 (
2063 (
2064 b'',
2064 b'',
2065 b'record-parents',
2065 b'record-parents',
2066 False,
2066 False,
2067 _(b'record parent information for the precursor'),
2067 _(b'record parent information for the precursor'),
2068 ),
2068 ),
2069 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2069 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2070 (
2070 (
2071 b'',
2071 b'',
2072 b'exclusive',
2072 b'exclusive',
2073 False,
2073 False,
2074 _(b'restrict display to markers only relevant to REV'),
2074 _(b'restrict display to markers only relevant to REV'),
2075 ),
2075 ),
2076 (b'', b'index', False, _(b'display index of the marker')),
2076 (b'', b'index', False, _(b'display index of the marker')),
2077 (b'', b'delete', [], _(b'delete markers specified by indices')),
2077 (b'', b'delete', [], _(b'delete markers specified by indices')),
2078 ]
2078 ]
2079 + cmdutil.commitopts2
2079 + cmdutil.commitopts2
2080 + cmdutil.formatteropts,
2080 + cmdutil.formatteropts,
2081 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2081 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2082 )
2082 )
2083 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2083 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2084 """create arbitrary obsolete marker
2084 """create arbitrary obsolete marker
2085
2085
2086 With no arguments, displays the list of obsolescence markers."""
2086 With no arguments, displays the list of obsolescence markers."""
2087
2087
2088 opts = pycompat.byteskwargs(opts)
2088 opts = pycompat.byteskwargs(opts)
2089
2089
2090 def parsenodeid(s):
2090 def parsenodeid(s):
2091 try:
2091 try:
2092 # We do not use revsingle/revrange functions here to accept
2092 # We do not use revsingle/revrange functions here to accept
2093 # arbitrary node identifiers, possibly not present in the
2093 # arbitrary node identifiers, possibly not present in the
2094 # local repository.
2094 # local repository.
2095 n = bin(s)
2095 n = bin(s)
2096 if len(n) != len(nullid):
2096 if len(n) != len(nullid):
2097 raise TypeError()
2097 raise TypeError()
2098 return n
2098 return n
2099 except TypeError:
2099 except TypeError:
2100 raise error.Abort(
2100 raise error.Abort(
2101 b'changeset references must be full hexadecimal '
2101 b'changeset references must be full hexadecimal '
2102 b'node identifiers'
2102 b'node identifiers'
2103 )
2103 )
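# Illustrative example (not part of the original source): parsenodeid()
# only accepts full 40-character hexadecimal identifiers, e.g.
#   parsenodeid(b'1234567890' * 4)  ->  20-byte binary node
# while a short prefix such as b'12345678' is rejected with the abort
# message above, because the referenced node does not need to exist locally
# and therefore cannot be resolved from a prefix.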
2104
2104
2105 if opts.get(b'delete'):
2105 if opts.get(b'delete'):
2106 indices = []
2106 indices = []
2107 for v in opts.get(b'delete'):
2107 for v in opts.get(b'delete'):
2108 try:
2108 try:
2109 indices.append(int(v))
2109 indices.append(int(v))
2110 except ValueError:
2110 except ValueError:
2111 raise error.Abort(
2111 raise error.Abort(
2112 _(b'invalid index value: %r') % v,
2112 _(b'invalid index value: %r') % v,
2113 hint=_(b'use integers for indices'),
2113 hint=_(b'use integers for indices'),
2114 )
2114 )
2115
2115
2116 if repo.currenttransaction():
2116 if repo.currenttransaction():
2117 raise error.Abort(
2117 raise error.Abort(
2118 _(b'cannot delete obsmarkers in the middle of a transaction.')
2118 _(b'cannot delete obsmarkers in the middle of a transaction.')
2119 )
2119 )
2120
2120
2121 with repo.lock():
2121 with repo.lock():
2122 n = repair.deleteobsmarkers(repo.obsstore, indices)
2122 n = repair.deleteobsmarkers(repo.obsstore, indices)
2123 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2123 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2124
2124
2125 return
2125 return
2126
2126
2127 if precursor is not None:
2127 if precursor is not None:
2128 if opts[b'rev']:
2128 if opts[b'rev']:
2129 raise error.Abort(b'cannot select revision when creating marker')
2129 raise error.Abort(b'cannot select revision when creating marker')
2130 metadata = {}
2130 metadata = {}
2131 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2131 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2132 succs = tuple(parsenodeid(succ) for succ in successors)
2132 succs = tuple(parsenodeid(succ) for succ in successors)
2133 l = repo.lock()
2133 l = repo.lock()
2134 try:
2134 try:
2135 tr = repo.transaction(b'debugobsolete')
2135 tr = repo.transaction(b'debugobsolete')
2136 try:
2136 try:
2137 date = opts.get(b'date')
2137 date = opts.get(b'date')
2138 if date:
2138 if date:
2139 date = dateutil.parsedate(date)
2139 date = dateutil.parsedate(date)
2140 else:
2140 else:
2141 date = None
2141 date = None
2142 prec = parsenodeid(precursor)
2142 prec = parsenodeid(precursor)
2143 parents = None
2143 parents = None
2144 if opts[b'record_parents']:
2144 if opts[b'record_parents']:
2145 if prec not in repo.unfiltered():
2145 if prec not in repo.unfiltered():
2146 raise error.Abort(
2146 raise error.Abort(
2147 b'cannot use --record-parents on '
2147 b'cannot use --record-parents on '
2148 b'unknown changesets'
2148 b'unknown changesets'
2149 )
2149 )
2150 parents = repo.unfiltered()[prec].parents()
2150 parents = repo.unfiltered()[prec].parents()
2151 parents = tuple(p.node() for p in parents)
2151 parents = tuple(p.node() for p in parents)
2152 repo.obsstore.create(
2152 repo.obsstore.create(
2153 tr,
2153 tr,
2154 prec,
2154 prec,
2155 succs,
2155 succs,
2156 opts[b'flags'],
2156 opts[b'flags'],
2157 parents=parents,
2157 parents=parents,
2158 date=date,
2158 date=date,
2159 metadata=metadata,
2159 metadata=metadata,
2160 ui=ui,
2160 ui=ui,
2161 )
2161 )
2162 tr.close()
2162 tr.close()
2163 except ValueError as exc:
2163 except ValueError as exc:
2164 raise error.Abort(
2164 raise error.Abort(
2165 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2165 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2166 )
2166 )
2167 finally:
2167 finally:
2168 tr.release()
2168 tr.release()
2169 finally:
2169 finally:
2170 l.release()
2170 l.release()
2171 else:
2171 else:
2172 if opts[b'rev']:
2172 if opts[b'rev']:
2173 revs = scmutil.revrange(repo, opts[b'rev'])
2173 revs = scmutil.revrange(repo, opts[b'rev'])
2174 nodes = [repo[r].node() for r in revs]
2174 nodes = [repo[r].node() for r in revs]
2175 markers = list(
2175 markers = list(
2176 obsutil.getmarkers(
2176 obsutil.getmarkers(
2177 repo, nodes=nodes, exclusive=opts[b'exclusive']
2177 repo, nodes=nodes, exclusive=opts[b'exclusive']
2178 )
2178 )
2179 )
2179 )
2180 markers.sort(key=lambda x: x._data)
2180 markers.sort(key=lambda x: x._data)
2181 else:
2181 else:
2182 markers = obsutil.getmarkers(repo)
2182 markers = obsutil.getmarkers(repo)
2183
2183
2184 markerstoiter = markers
2184 markerstoiter = markers
2185 isrelevant = lambda m: True
2185 isrelevant = lambda m: True
2186 if opts.get(b'rev') and opts.get(b'index'):
2186 if opts.get(b'rev') and opts.get(b'index'):
2187 markerstoiter = obsutil.getmarkers(repo)
2187 markerstoiter = obsutil.getmarkers(repo)
2188 markerset = set(markers)
2188 markerset = set(markers)
2189 isrelevant = lambda m: m in markerset
2189 isrelevant = lambda m: m in markerset
2190
2190
2191 fm = ui.formatter(b'debugobsolete', opts)
2191 fm = ui.formatter(b'debugobsolete', opts)
2192 for i, m in enumerate(markerstoiter):
2192 for i, m in enumerate(markerstoiter):
2193 if not isrelevant(m):
2193 if not isrelevant(m):
2194 # marker can be irrelevant when we're iterating over a set
2194 # marker can be irrelevant when we're iterating over a set
2195 # of markers (markerstoiter) which is bigger than the set
2195 # of markers (markerstoiter) which is bigger than the set
2196 # of markers we want to display (markers)
2196 # of markers we want to display (markers)
2197 # this can happen if both --index and --rev options are
2197 # this can happen if both --index and --rev options are
2198 # provided and thus we need to iterate over all of the markers
2198 # provided and thus we need to iterate over all of the markers
2199 # to get the correct indices, but only display the ones that
2199 # to get the correct indices, but only display the ones that
2200 # are relevant to --rev value
2200 # are relevant to --rev value
2201 continue
2201 continue
2202 fm.startitem()
2202 fm.startitem()
2203 ind = i if opts.get(b'index') else None
2203 ind = i if opts.get(b'index') else None
2204 cmdutil.showmarker(fm, m, index=ind)
2204 cmdutil.showmarker(fm, m, index=ind)
2205 fm.end()
2205 fm.end()
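# Illustrative usage (assumption, not part of the original source):
#   hg debugobsolete                          # list all obsolescence markers
#   hg debugobsolete --index                  # same, prefixed with marker indices
#   hg debugobsolete --delete 0 --delete 3    # drop the markers at indices 0 and 3
#   hg debugobsolete <old-node> <new-node>    # record that <old-node> was
#                                             # rewritten into <new-node>
# where <old-node> and <new-node> stand for full hexadecimal node ids.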
2206
2206
2207
2207
2208 @command(
2208 @command(
2209 b'debugp1copies',
2209 b'debugp1copies',
2210 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2210 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2211 _(b'[-r REV]'),
2211 _(b'[-r REV]'),
2212 )
2212 )
2213 def debugp1copies(ui, repo, **opts):
2213 def debugp1copies(ui, repo, **opts):
2214 """dump copy information compared to p1"""
2214 """dump copy information compared to p1"""
2215
2215
2216 opts = pycompat.byteskwargs(opts)
2216 opts = pycompat.byteskwargs(opts)
2217 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2217 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2218 for dst, src in ctx.p1copies().items():
2218 for dst, src in ctx.p1copies().items():
2219 ui.write(b'%s -> %s\n' % (src, dst))
2219 ui.write(b'%s -> %s\n' % (src, dst))
2220
2220
2221
2221
2222 @command(
2222 @command(
2223 b'debugp2copies',
2223 b'debugp2copies',
2224 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2224 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2225 _(b'[-r REV]'),
2225 _(b'[-r REV]'),
2226 )
2226 )
2227 def debugp2copies(ui, repo, **opts):
2227 def debugp2copies(ui, repo, **opts):
2228 """dump copy information compared to p2"""
2228 """dump copy information compared to p2"""
2229
2229
2230 opts = pycompat.byteskwargs(opts)
2230 opts = pycompat.byteskwargs(opts)
2231 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2231 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2232 for dst, src in ctx.p2copies().items():
2232 for dst, src in ctx.p2copies().items():
2233 ui.write(b'%s -> %s\n' % (src, dst))
2233 ui.write(b'%s -> %s\n' % (src, dst))
2234
2234
2235
2235
2236 @command(
2236 @command(
2237 b'debugpathcomplete',
2237 b'debugpathcomplete',
2238 [
2238 [
2239 (b'f', b'full', None, _(b'complete an entire path')),
2239 (b'f', b'full', None, _(b'complete an entire path')),
2240 (b'n', b'normal', None, _(b'show only normal files')),
2240 (b'n', b'normal', None, _(b'show only normal files')),
2241 (b'a', b'added', None, _(b'show only added files')),
2241 (b'a', b'added', None, _(b'show only added files')),
2242 (b'r', b'removed', None, _(b'show only removed files')),
2242 (b'r', b'removed', None, _(b'show only removed files')),
2243 ],
2243 ],
2244 _(b'FILESPEC...'),
2244 _(b'FILESPEC...'),
2245 )
2245 )
2246 def debugpathcomplete(ui, repo, *specs, **opts):
2246 def debugpathcomplete(ui, repo, *specs, **opts):
2247 '''complete part or all of a tracked path
2247 '''complete part or all of a tracked path
2248
2248
2249 This command supports shells that offer path name completion. It
2249 This command supports shells that offer path name completion. It
2250 currently completes only files already known to the dirstate.
2250 currently completes only files already known to the dirstate.
2251
2251
2252 Completion extends only to the next path segment unless
2252 Completion extends only to the next path segment unless
2253 --full is specified, in which case entire paths are used.'''
2253 --full is specified, in which case entire paths are used.'''
2254
2254
2255 def complete(path, acceptable):
2255 def complete(path, acceptable):
2256 dirstate = repo.dirstate
2256 dirstate = repo.dirstate
2257 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2257 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2258 rootdir = repo.root + pycompat.ossep
2258 rootdir = repo.root + pycompat.ossep
2259 if spec != repo.root and not spec.startswith(rootdir):
2259 if spec != repo.root and not spec.startswith(rootdir):
2260 return [], []
2260 return [], []
2261 if os.path.isdir(spec):
2261 if os.path.isdir(spec):
2262 spec += b'/'
2262 spec += b'/'
2263 spec = spec[len(rootdir) :]
2263 spec = spec[len(rootdir) :]
2264 fixpaths = pycompat.ossep != b'/'
2264 fixpaths = pycompat.ossep != b'/'
2265 if fixpaths:
2265 if fixpaths:
2266 spec = spec.replace(pycompat.ossep, b'/')
2266 spec = spec.replace(pycompat.ossep, b'/')
2267 speclen = len(spec)
2267 speclen = len(spec)
2268 fullpaths = opts[r'full']
2268 fullpaths = opts[r'full']
2269 files, dirs = set(), set()
2269 files, dirs = set(), set()
2270 adddir, addfile = dirs.add, files.add
2270 adddir, addfile = dirs.add, files.add
2271 for f, st in pycompat.iteritems(dirstate):
2271 for f, st in pycompat.iteritems(dirstate):
2272 if f.startswith(spec) and st[0] in acceptable:
2272 if f.startswith(spec) and st[0] in acceptable:
2273 if fixpaths:
2273 if fixpaths:
2274 f = f.replace(b'/', pycompat.ossep)
2274 f = f.replace(b'/', pycompat.ossep)
2275 if fullpaths:
2275 if fullpaths:
2276 addfile(f)
2276 addfile(f)
2277 continue
2277 continue
2278 s = f.find(pycompat.ossep, speclen)
2278 s = f.find(pycompat.ossep, speclen)
2279 if s >= 0:
2279 if s >= 0:
2280 adddir(f[:s])
2280 adddir(f[:s])
2281 else:
2281 else:
2282 addfile(f)
2282 addfile(f)
2283 return files, dirs
2283 return files, dirs
2284
2284
2285 acceptable = b''
2285 acceptable = b''
2286 if opts[r'normal']:
2286 if opts[r'normal']:
2287 acceptable += b'nm'
2287 acceptable += b'nm'
2288 if opts[r'added']:
2288 if opts[r'added']:
2289 acceptable += b'a'
2289 acceptable += b'a'
2290 if opts[r'removed']:
2290 if opts[r'removed']:
2291 acceptable += b'r'
2291 acceptable += b'r'
2292 cwd = repo.getcwd()
2292 cwd = repo.getcwd()
2293 if not specs:
2293 if not specs:
2294 specs = [b'.']
2294 specs = [b'.']
2295
2295
2296 files, dirs = set(), set()
2296 files, dirs = set(), set()
2297 for spec in specs:
2297 for spec in specs:
2298 f, d = complete(spec, acceptable or b'nmar')
2298 f, d = complete(spec, acceptable or b'nmar')
2299 files.update(f)
2299 files.update(f)
2300 dirs.update(d)
2300 dirs.update(d)
2301 files.update(dirs)
2301 files.update(dirs)
2302 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2302 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2303 ui.write(b'\n')
2303 ui.write(b'\n')
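# Illustrative usage (assumption, not part of the original source): with
# tracked files "foo/bar.txt" and "foo/baz.txt",
#   hg debugpathcomplete fo          # prints "foo", the next path segment
#   hg debugpathcomplete --full fo   # prints both full paths
# The -n/-a/-r flags restrict matches to normal, added or removed files.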
2304
2304
2305
2305
2306 @command(
2306 @command(
2307 b'debugpathcopies',
2307 b'debugpathcopies',
2308 cmdutil.walkopts,
2308 cmdutil.walkopts,
2309 b'hg debugpathcopies REV1 REV2 [FILE]',
2309 b'hg debugpathcopies REV1 REV2 [FILE]',
2310 inferrepo=True,
2310 inferrepo=True,
2311 )
2311 )
2312 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2312 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2313 """show copies between two revisions"""
2313 """show copies between two revisions"""
2314 ctx1 = scmutil.revsingle(repo, rev1)
2314 ctx1 = scmutil.revsingle(repo, rev1)
2315 ctx2 = scmutil.revsingle(repo, rev2)
2315 ctx2 = scmutil.revsingle(repo, rev2)
2316 m = scmutil.match(ctx1, pats, opts)
2316 m = scmutil.match(ctx1, pats, opts)
2317 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2317 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2318 ui.write(b'%s -> %s\n' % (src, dst))
2318 ui.write(b'%s -> %s\n' % (src, dst))
2319
2319
2320
2320
2321 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2321 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2322 def debugpeer(ui, path):
2322 def debugpeer(ui, path):
2323 """establish a connection to a peer repository"""
2323 """establish a connection to a peer repository"""
2324 # Always enable peer request logging. Requires --debug to display
2324 # Always enable peer request logging. Requires --debug to display
2325 # though.
2325 # though.
2326 overrides = {
2326 overrides = {
2327 (b'devel', b'debug.peer-request'): True,
2327 (b'devel', b'debug.peer-request'): True,
2328 }
2328 }
2329
2329
2330 with ui.configoverride(overrides):
2330 with ui.configoverride(overrides):
2331 peer = hg.peer(ui, {}, path)
2331 peer = hg.peer(ui, {}, path)
2332
2332
2333 local = peer.local() is not None
2333 local = peer.local() is not None
2334 canpush = peer.canpush()
2334 canpush = peer.canpush()
2335
2335
2336 ui.write(_(b'url: %s\n') % peer.url())
2336 ui.write(_(b'url: %s\n') % peer.url())
2337 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2337 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2338 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2338 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2339
2339
2340
2340
2341 @command(
2341 @command(
2342 b'debugpickmergetool',
2342 b'debugpickmergetool',
2343 [
2343 [
2344 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2344 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2345 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2345 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2346 ]
2346 ]
2347 + cmdutil.walkopts
2347 + cmdutil.walkopts
2348 + cmdutil.mergetoolopts,
2348 + cmdutil.mergetoolopts,
2349 _(b'[PATTERN]...'),
2349 _(b'[PATTERN]...'),
2350 inferrepo=True,
2350 inferrepo=True,
2351 )
2351 )
2352 def debugpickmergetool(ui, repo, *pats, **opts):
2352 def debugpickmergetool(ui, repo, *pats, **opts):
2353 """examine which merge tool is chosen for specified file
2353 """examine which merge tool is chosen for specified file
2354
2354
2355 As described in :hg:`help merge-tools`, Mercurial examines
2355 As described in :hg:`help merge-tools`, Mercurial examines
2356 the configurations below in this order to decide which merge tool is
2356 the configurations below in this order to decide which merge tool is
2357 chosen for the specified file.
2357 chosen for the specified file.
2358
2358
2359 1. ``--tool`` option
2359 1. ``--tool`` option
2360 2. ``HGMERGE`` environment variable
2360 2. ``HGMERGE`` environment variable
2361 3. configurations in ``merge-patterns`` section
2361 3. configurations in ``merge-patterns`` section
2362 4. configuration of ``ui.merge``
2362 4. configuration of ``ui.merge``
2363 5. configurations in ``merge-tools`` section
2363 5. configurations in ``merge-tools`` section
2364 6. ``hgmerge`` tool (for historical reasons only)
2364 6. ``hgmerge`` tool (for historical reasons only)
2365 7. default tool for fallback (``:merge`` or ``:prompt``)
2365 7. default tool for fallback (``:merge`` or ``:prompt``)
2366
2366
2367 This command writes out the examination result in the style below::
2367 This command writes out the examination result in the style below::
2368
2368
2369 FILE = MERGETOOL
2369 FILE = MERGETOOL
2370
2370
2371 By default, all files known in the first parent context of the
2371 By default, all files known in the first parent context of the
2372 working directory are examined. Use file patterns and/or -I/-X
2372 working directory are examined. Use file patterns and/or -I/-X
2373 options to limit target files. -r/--rev is also useful to examine
2373 options to limit target files. -r/--rev is also useful to examine
2374 files in another context without actually updating to it.
2374 files in another context without actually updating to it.
2375
2375
2376 With --debug, this command shows warning messages while matching
2376 With --debug, this command shows warning messages while matching
2377 against ``merge-patterns`` and so on, too. It is recommended to
2377 against ``merge-patterns`` and so on, too. It is recommended to
2378 use this option with explicit file patterns and/or -I/-X options,
2378 use this option with explicit file patterns and/or -I/-X options,
2379 because this option increases the amount of output per file according
2379 because this option increases the amount of output per file according
2380 to configurations in hgrc.
2380 to configurations in hgrc.
2381
2381
2382 With -v/--verbose, this command first shows the configurations below
2382 With -v/--verbose, this command first shows the configurations below
2383 (only if specified).
2383 (only if specified).
2384
2384
2385 - ``--tool`` option
2385 - ``--tool`` option
2386 - ``HGMERGE`` environment variable
2386 - ``HGMERGE`` environment variable
2387 - configuration of ``ui.merge``
2387 - configuration of ``ui.merge``
2388
2388
2389 If a merge tool is chosen before matching against
2389 If a merge tool is chosen before matching against
2390 ``merge-patterns``, this command can't show any helpful
2390 ``merge-patterns``, this command can't show any helpful
2391 information, even with --debug. In such a case, the information above
2391 information, even with --debug. In such a case, the information above
2392 is useful for understanding why a merge tool was chosen.
2392 is useful for understanding why a merge tool was chosen.
2393 """
2393 """
2394 opts = pycompat.byteskwargs(opts)
2394 opts = pycompat.byteskwargs(opts)
2395 overrides = {}
2395 overrides = {}
2396 if opts[b'tool']:
2396 if opts[b'tool']:
2397 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2397 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2398 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2398 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2399
2399
2400 with ui.configoverride(overrides, b'debugmergepatterns'):
2400 with ui.configoverride(overrides, b'debugmergepatterns'):
2401 hgmerge = encoding.environ.get(b"HGMERGE")
2401 hgmerge = encoding.environ.get(b"HGMERGE")
2402 if hgmerge is not None:
2402 if hgmerge is not None:
2403 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2403 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2404 uimerge = ui.config(b"ui", b"merge")
2404 uimerge = ui.config(b"ui", b"merge")
2405 if uimerge:
2405 if uimerge:
2406 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2406 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2407
2407
2408 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2408 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2409 m = scmutil.match(ctx, pats, opts)
2409 m = scmutil.match(ctx, pats, opts)
2410 changedelete = opts[b'changedelete']
2410 changedelete = opts[b'changedelete']
2411 for path in ctx.walk(m):
2411 for path in ctx.walk(m):
2412 fctx = ctx[path]
2412 fctx = ctx[path]
2413 try:
2413 try:
2414 if not ui.debugflag:
2414 if not ui.debugflag:
2415 ui.pushbuffer(error=True)
2415 ui.pushbuffer(error=True)
2416 tool, toolpath = filemerge._picktool(
2416 tool, toolpath = filemerge._picktool(
2417 repo,
2417 repo,
2418 ui,
2418 ui,
2419 path,
2419 path,
2420 fctx.isbinary(),
2420 fctx.isbinary(),
2421 b'l' in fctx.flags(),
2421 b'l' in fctx.flags(),
2422 changedelete,
2422 changedelete,
2423 )
2423 )
2424 finally:
2424 finally:
2425 if not ui.debugflag:
2425 if not ui.debugflag:
2426 ui.popbuffer()
2426 ui.popbuffer()
2427 ui.write(b'%s = %s\n' % (path, tool))
2427 ui.write(b'%s = %s\n' % (path, tool))
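# Illustrative usage (assumption, not part of the original source):
#   hg debugpickmergetool                  # one "FILE = MERGETOOL" line per
#                                          # file in the working copy parent
#   hg debugpickmergetool --tool :merge foo.c
# would print "foo.c = :merge", since an explicit --tool wins over every
# other configuration listed in the docstring above.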
2428
2428
2429
2429
2430 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2430 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2431 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2431 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2432 '''access the pushkey key/value protocol
2432 '''access the pushkey key/value protocol
2433
2433
2434 With two args, list the keys in the given namespace.
2434 With two args, list the keys in the given namespace.
2435
2435
2436 With five args, set a key to new if it currently is set to old.
2436 With five args, set a key to new if it currently is set to old.
2437 Reports success or failure.
2437 Reports success or failure.
2438 '''
2438 '''
2439
2439
2440 target = hg.peer(ui, {}, repopath)
2440 target = hg.peer(ui, {}, repopath)
2441 if keyinfo:
2441 if keyinfo:
2442 key, old, new = keyinfo
2442 key, old, new = keyinfo
2443 with target.commandexecutor() as e:
2443 with target.commandexecutor() as e:
2444 r = e.callcommand(
2444 r = e.callcommand(
2445 b'pushkey',
2445 b'pushkey',
2446 {
2446 {
2447 b'namespace': namespace,
2447 b'namespace': namespace,
2448 b'key': key,
2448 b'key': key,
2449 b'old': old,
2449 b'old': old,
2450 b'new': new,
2450 b'new': new,
2451 },
2451 },
2452 ).result()
2452 ).result()
2453
2453
2454 ui.status(pycompat.bytestr(r) + b'\n')
2454 ui.status(pycompat.bytestr(r) + b'\n')
2455 return not r
2455 return not r
2456 else:
2456 else:
2457 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2457 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2458 ui.write(
2458 ui.write(
2459 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2459 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2460 )
2460 )
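# Illustrative usage (assumption, not part of the original source):
#   hg debugpushkey /path/to/repo namespaces     # list keys in a namespace
#   hg debugpushkey /path/to/repo bookmarks mybook <old-node> <new-node>
# The five-argument form updates the key only if it still holds the old
# value and reports success (True) or failure (False).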
2461
2461
2462
2462
2463 @command(b'debugpvec', [], _(b'A B'))
2463 @command(b'debugpvec', [], _(b'A B'))
2464 def debugpvec(ui, repo, a, b=None):
2464 def debugpvec(ui, repo, a, b=None):
2465 ca = scmutil.revsingle(repo, a)
2465 ca = scmutil.revsingle(repo, a)
2466 cb = scmutil.revsingle(repo, b)
2466 cb = scmutil.revsingle(repo, b)
2467 pa = pvec.ctxpvec(ca)
2467 pa = pvec.ctxpvec(ca)
2468 pb = pvec.ctxpvec(cb)
2468 pb = pvec.ctxpvec(cb)
2469 if pa == pb:
2469 if pa == pb:
2470 rel = b"="
2470 rel = b"="
2471 elif pa > pb:
2471 elif pa > pb:
2472 rel = b">"
2472 rel = b">"
2473 elif pa < pb:
2473 elif pa < pb:
2474 rel = b"<"
2474 rel = b"<"
2475 elif pa | pb:
2475 elif pa | pb:
2476 rel = b"|"
2476 rel = b"|"
2477 ui.write(_(b"a: %s\n") % pa)
2477 ui.write(_(b"a: %s\n") % pa)
2478 ui.write(_(b"b: %s\n") % pb)
2478 ui.write(_(b"b: %s\n") % pb)
2479 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2479 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2480 ui.write(
2480 ui.write(
2481 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2481 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2482 % (
2482 % (
2483 abs(pa._depth - pb._depth),
2483 abs(pa._depth - pb._depth),
2484 pvec._hamming(pa._vec, pb._vec),
2484 pvec._hamming(pa._vec, pb._vec),
2485 pa.distance(pb),
2485 pa.distance(pb),
2486 rel,
2486 rel,
2487 )
2487 )
2488 )
2488 )
2489
2489
2490
2490
2491 @command(
2491 @command(
2492 b'debugrebuilddirstate|debugrebuildstate',
2492 b'debugrebuilddirstate|debugrebuildstate',
2493 [
2493 [
2494 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2494 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2495 (
2495 (
2496 b'',
2496 b'',
2497 b'minimal',
2497 b'minimal',
2498 None,
2498 None,
2499 _(
2499 _(
2500 b'only rebuild files that are inconsistent with '
2500 b'only rebuild files that are inconsistent with '
2501 b'the working copy parent'
2501 b'the working copy parent'
2502 ),
2502 ),
2503 ),
2503 ),
2504 ],
2504 ],
2505 _(b'[-r REV]'),
2505 _(b'[-r REV]'),
2506 )
2506 )
2507 def debugrebuilddirstate(ui, repo, rev, **opts):
2507 def debugrebuilddirstate(ui, repo, rev, **opts):
2508 """rebuild the dirstate as it would look like for the given revision
2508 """rebuild the dirstate as it would look like for the given revision
2509
2509
2510 If no revision is specified the first current parent will be used.
2510 If no revision is specified the first current parent will be used.
2511
2511
2512 The dirstate will be set to the files of the given revision.
2512 The dirstate will be set to the files of the given revision.
2513 The actual working directory content or existing dirstate
2513 The actual working directory content or existing dirstate
2514 information such as adds or removes is not considered.
2514 information such as adds or removes is not considered.
2515
2515
2516 ``minimal`` will only rebuild the dirstate status for files that claim to be
2516 ``minimal`` will only rebuild the dirstate status for files that claim to be
2517 tracked but are not in the parent manifest, or that exist in the parent
2517 tracked but are not in the parent manifest, or that exist in the parent
2518 manifest but are not in the dirstate. It will not change adds, removes, or
2518 manifest but are not in the dirstate. It will not change adds, removes, or
2519 modified files that are in the working copy parent.
2519 modified files that are in the working copy parent.
2520
2520
2521 One use of this command is to make the next :hg:`status` invocation
2521 One use of this command is to make the next :hg:`status` invocation
2522 check the actual file content.
2522 check the actual file content.
2523 """
2523 """
2524 ctx = scmutil.revsingle(repo, rev)
2524 ctx = scmutil.revsingle(repo, rev)
2525 with repo.wlock():
2525 with repo.wlock():
2526 dirstate = repo.dirstate
2526 dirstate = repo.dirstate
2527 changedfiles = None
2527 changedfiles = None
2528 # See command doc for what minimal does.
2528 # See command doc for what minimal does.
2529 if opts.get(r'minimal'):
2529 if opts.get(r'minimal'):
2530 manifestfiles = set(ctx.manifest().keys())
2530 manifestfiles = set(ctx.manifest().keys())
2531 dirstatefiles = set(dirstate)
2531 dirstatefiles = set(dirstate)
2532 manifestonly = manifestfiles - dirstatefiles
2532 manifestonly = manifestfiles - dirstatefiles
2533 dsonly = dirstatefiles - manifestfiles
2533 dsonly = dirstatefiles - manifestfiles
2534 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2534 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2535 changedfiles = manifestonly | dsnotadded
2535 changedfiles = manifestonly | dsnotadded
2536
2536
2537 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2537 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
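# Illustrative usage (assumption, not part of the original source):
#   hg debugrebuilddirstate              # reset the dirstate to the first parent
#   hg debugrebuilddirstate --minimal    # only repair entries that disagree
#                                        # with the parent manifest
# A plain run makes the next "hg status" re-check file content, as noted in
# the docstring above.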
2538
2538
2539
2539
2540 @command(b'debugrebuildfncache', [], b'')
2540 @command(b'debugrebuildfncache', [], b'')
2541 def debugrebuildfncache(ui, repo):
2541 def debugrebuildfncache(ui, repo):
2542 """rebuild the fncache file"""
2542 """rebuild the fncache file"""
2543 repair.rebuildfncache(ui, repo)
2543 repair.rebuildfncache(ui, repo)
2544
2544
2545
2545
2546 @command(
2546 @command(
2547 b'debugrename',
2547 b'debugrename',
2548 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2548 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2549 _(b'[-r REV] [FILE]...'),
2549 _(b'[-r REV] [FILE]...'),
2550 )
2550 )
2551 def debugrename(ui, repo, *pats, **opts):
2551 def debugrename(ui, repo, *pats, **opts):
2552 """dump rename information"""
2552 """dump rename information"""
2553
2553
2554 opts = pycompat.byteskwargs(opts)
2554 opts = pycompat.byteskwargs(opts)
2555 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2555 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2556 m = scmutil.match(ctx, pats, opts)
2556 m = scmutil.match(ctx, pats, opts)
2557 for abs in ctx.walk(m):
2557 for abs in ctx.walk(m):
2558 fctx = ctx[abs]
2558 fctx = ctx[abs]
2559 o = fctx.filelog().renamed(fctx.filenode())
2559 o = fctx.filelog().renamed(fctx.filenode())
2560 rel = repo.pathto(abs)
2560 rel = repo.pathto(abs)
2561 if o:
2561 if o:
2562 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2562 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2563 else:
2563 else:
2564 ui.write(_(b"%s not renamed\n") % rel)
2564 ui.write(_(b"%s not renamed\n") % rel)
2565
2565
2566
2566
2567 @command(
2567 @command(
2568 b'debugrevlog',
2568 b'debugrevlog',
2569 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2569 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2570 _(b'-c|-m|FILE'),
2570 _(b'-c|-m|FILE'),
2571 optionalrepo=True,
2571 optionalrepo=True,
2572 )
2572 )
2573 def debugrevlog(ui, repo, file_=None, **opts):
2573 def debugrevlog(ui, repo, file_=None, **opts):
2574 """show data and statistics about a revlog"""
2574 """show data and statistics about a revlog"""
2575 opts = pycompat.byteskwargs(opts)
2575 opts = pycompat.byteskwargs(opts)
2576 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2576 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2577
2577
2578 if opts.get(b"dump"):
2578 if opts.get(b"dump"):
2579 numrevs = len(r)
2579 numrevs = len(r)
2580 ui.write(
2580 ui.write(
2581 (
2581 (
2582 b"# rev p1rev p2rev start end deltastart base p1 p2"
2582 b"# rev p1rev p2rev start end deltastart base p1 p2"
2583 b" rawsize totalsize compression heads chainlen\n"
2583 b" rawsize totalsize compression heads chainlen\n"
2584 )
2584 )
2585 )
2585 )
2586 ts = 0
2586 ts = 0
2587 heads = set()
2587 heads = set()
2588
2588
2589 for rev in pycompat.xrange(numrevs):
2589 for rev in pycompat.xrange(numrevs):
2590 dbase = r.deltaparent(rev)
2590 dbase = r.deltaparent(rev)
2591 if dbase == -1:
2591 if dbase == -1:
2592 dbase = rev
2592 dbase = rev
2593 cbase = r.chainbase(rev)
2593 cbase = r.chainbase(rev)
2594 clen = r.chainlen(rev)
2594 clen = r.chainlen(rev)
2595 p1, p2 = r.parentrevs(rev)
2595 p1, p2 = r.parentrevs(rev)
2596 rs = r.rawsize(rev)
2596 rs = r.rawsize(rev)
2597 ts = ts + rs
2597 ts = ts + rs
2598 heads -= set(r.parentrevs(rev))
2598 heads -= set(r.parentrevs(rev))
2599 heads.add(rev)
2599 heads.add(rev)
2600 try:
2600 try:
2601 compression = ts / r.end(rev)
2601 compression = ts / r.end(rev)
2602 except ZeroDivisionError:
2602 except ZeroDivisionError:
2603 compression = 0
2603 compression = 0
2604 ui.write(
2604 ui.write(
2605 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2605 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2606 b"%11d %5d %8d\n"
2606 b"%11d %5d %8d\n"
2607 % (
2607 % (
2608 rev,
2608 rev,
2609 p1,
2609 p1,
2610 p2,
2610 p2,
2611 r.start(rev),
2611 r.start(rev),
2612 r.end(rev),
2612 r.end(rev),
2613 r.start(dbase),
2613 r.start(dbase),
2614 r.start(cbase),
2614 r.start(cbase),
2615 r.start(p1),
2615 r.start(p1),
2616 r.start(p2),
2616 r.start(p2),
2617 rs,
2617 rs,
2618 ts,
2618 ts,
2619 compression,
2619 compression,
2620 len(heads),
2620 len(heads),
2621 clen,
2621 clen,
2622 )
2622 )
2623 )
2623 )
2624 return 0
2624 return 0
2625
2625
2626 v = r.version
2626 v = r.version
2627 format = v & 0xFFFF
2627 format = v & 0xFFFF
2628 flags = []
2628 flags = []
2629 gdelta = False
2629 gdelta = False
2630 if v & revlog.FLAG_INLINE_DATA:
2630 if v & revlog.FLAG_INLINE_DATA:
2631 flags.append(b'inline')
2631 flags.append(b'inline')
2632 if v & revlog.FLAG_GENERALDELTA:
2632 if v & revlog.FLAG_GENERALDELTA:
2633 gdelta = True
2633 gdelta = True
2634 flags.append(b'generaldelta')
2634 flags.append(b'generaldelta')
2635 if not flags:
2635 if not flags:
2636 flags = [b'(none)']
2636 flags = [b'(none)']
2637
2637
2638 ### tracks merge vs single parent
2638 ### tracks merge vs single parent
2639 nummerges = 0
2639 nummerges = 0
2640
2640
2641 ### tracks ways the "delta" are build
2641 ### tracks ways the "delta" are build
2642 # nodelta
2642 # nodelta
2643 numempty = 0
2643 numempty = 0
2644 numemptytext = 0
2644 numemptytext = 0
2645 numemptydelta = 0
2645 numemptydelta = 0
2646 # full file content
2646 # full file content
2647 numfull = 0
2647 numfull = 0
2648 # intermediate snapshot against a prior snapshot
2648 # intermediate snapshot against a prior snapshot
2649 numsemi = 0
2649 numsemi = 0
2650 # snapshot count per depth
2650 # snapshot count per depth
2651 numsnapdepth = collections.defaultdict(lambda: 0)
2651 numsnapdepth = collections.defaultdict(lambda: 0)
2652 # delta against previous revision
2652 # delta against previous revision
2653 numprev = 0
2653 numprev = 0
2654 # delta against first or second parent (not prev)
2654 # delta against first or second parent (not prev)
2655 nump1 = 0
2655 nump1 = 0
2656 nump2 = 0
2656 nump2 = 0
2657 # delta against neither prev nor parents
2657 # delta against neither prev nor parents
2658 numother = 0
2658 numother = 0
2659 # delta against prev that are also first or second parent
2659 # delta against prev that are also first or second parent
2660 # (details of `numprev`)
2660 # (details of `numprev`)
2661 nump1prev = 0
2661 nump1prev = 0
2662 nump2prev = 0
2662 nump2prev = 0
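# Editorial note (not in the original source): together these counters
# classify every revision into exactly one bucket (empty, full snapshot at
# depth 0, intermediate snapshot, or delta), which is why the report below
# can derive numdeltas as numrevs - numfull - numempty - numsemi.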
2663
2663
2664 # data about delta chain of each revs
2664 # data about delta chain of each revs
2665 chainlengths = []
2665 chainlengths = []
2666 chainbases = []
2666 chainbases = []
2667 chainspans = []
2667 chainspans = []
2668
2668
2669 # data about each revision
2669 # data about each revision
2670 datasize = [None, 0, 0]
2670 datasize = [None, 0, 0]
2671 fullsize = [None, 0, 0]
2671 fullsize = [None, 0, 0]
2672 semisize = [None, 0, 0]
2672 semisize = [None, 0, 0]
2673 # snapshot count per depth
2673 # snapshot count per depth
2674 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2674 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2675 deltasize = [None, 0, 0]
2675 deltasize = [None, 0, 0]
2676 chunktypecounts = {}
2676 chunktypecounts = {}
2677 chunktypesizes = {}
2677 chunktypesizes = {}
2678
2678
2679 def addsize(size, l):
2679 def addsize(size, l):
2680 if l[0] is None or size < l[0]:
2680 if l[0] is None or size < l[0]:
2681 l[0] = size
2681 l[0] = size
2682 if size > l[1]:
2682 if size > l[1]:
2683 l[1] = size
2683 l[1] = size
2684 l[2] += size
2684 l[2] += size
2685
2685
2686 numrevs = len(r)
2686 numrevs = len(r)
2687 for rev in pycompat.xrange(numrevs):
2687 for rev in pycompat.xrange(numrevs):
2688 p1, p2 = r.parentrevs(rev)
2688 p1, p2 = r.parentrevs(rev)
2689 delta = r.deltaparent(rev)
2689 delta = r.deltaparent(rev)
2690 if format > 0:
2690 if format > 0:
2691 addsize(r.rawsize(rev), datasize)
2691 addsize(r.rawsize(rev), datasize)
2692 if p2 != nullrev:
2692 if p2 != nullrev:
2693 nummerges += 1
2693 nummerges += 1
2694 size = r.length(rev)
2694 size = r.length(rev)
2695 if delta == nullrev:
2695 if delta == nullrev:
2696 chainlengths.append(0)
2696 chainlengths.append(0)
2697 chainbases.append(r.start(rev))
2697 chainbases.append(r.start(rev))
2698 chainspans.append(size)
2698 chainspans.append(size)
2699 if size == 0:
2699 if size == 0:
2700 numempty += 1
2700 numempty += 1
2701 numemptytext += 1
2701 numemptytext += 1
2702 else:
2702 else:
2703 numfull += 1
2703 numfull += 1
2704 numsnapdepth[0] += 1
2704 numsnapdepth[0] += 1
2705 addsize(size, fullsize)
2705 addsize(size, fullsize)
2706 addsize(size, snapsizedepth[0])
2706 addsize(size, snapsizedepth[0])
2707 else:
2707 else:
2708 chainlengths.append(chainlengths[delta] + 1)
2708 chainlengths.append(chainlengths[delta] + 1)
2709 baseaddr = chainbases[delta]
2709 baseaddr = chainbases[delta]
2710 revaddr = r.start(rev)
2710 revaddr = r.start(rev)
2711 chainbases.append(baseaddr)
2711 chainbases.append(baseaddr)
2712 chainspans.append((revaddr - baseaddr) + size)
2712 chainspans.append((revaddr - baseaddr) + size)
2713 if size == 0:
2713 if size == 0:
2714 numempty += 1
2714 numempty += 1
2715 numemptydelta += 1
2715 numemptydelta += 1
2716 elif r.issnapshot(rev):
2716 elif r.issnapshot(rev):
2717 addsize(size, semisize)
2717 addsize(size, semisize)
2718 numsemi += 1
2718 numsemi += 1
2719 depth = r.snapshotdepth(rev)
2719 depth = r.snapshotdepth(rev)
2720 numsnapdepth[depth] += 1
2720 numsnapdepth[depth] += 1
2721 addsize(size, snapsizedepth[depth])
2721 addsize(size, snapsizedepth[depth])
2722 else:
2722 else:
2723 addsize(size, deltasize)
2723 addsize(size, deltasize)
2724 if delta == rev - 1:
2724 if delta == rev - 1:
2725 numprev += 1
2725 numprev += 1
2726 if delta == p1:
2726 if delta == p1:
2727 nump1prev += 1
2727 nump1prev += 1
2728 elif delta == p2:
2728 elif delta == p2:
2729 nump2prev += 1
2729 nump2prev += 1
2730 elif delta == p1:
2730 elif delta == p1:
2731 nump1 += 1
2731 nump1 += 1
2732 elif delta == p2:
2732 elif delta == p2:
2733 nump2 += 1
2733 nump2 += 1
2734 elif delta != nullrev:
2734 elif delta != nullrev:
2735 numother += 1
2735 numother += 1
2736
2736
2737 # Obtain data on the raw chunks in the revlog.
2737 # Obtain data on the raw chunks in the revlog.
2738 if util.safehasattr(r, b'_getsegmentforrevs'):
2738 if util.safehasattr(r, b'_getsegmentforrevs'):
2739 segment = r._getsegmentforrevs(rev, rev)[1]
2739 segment = r._getsegmentforrevs(rev, rev)[1]
2740 else:
2740 else:
2741 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2741 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2742 if segment:
2742 if segment:
2743 chunktype = bytes(segment[0:1])
2743 chunktype = bytes(segment[0:1])
2744 else:
2744 else:
2745 chunktype = b'empty'
2745 chunktype = b'empty'
2746
2746
2747 if chunktype not in chunktypecounts:
2747 if chunktype not in chunktypecounts:
2748 chunktypecounts[chunktype] = 0
2748 chunktypecounts[chunktype] = 0
2749 chunktypesizes[chunktype] = 0
2749 chunktypesizes[chunktype] = 0
2750
2750
2751 chunktypecounts[chunktype] += 1
2751 chunktypecounts[chunktype] += 1
2752 chunktypesizes[chunktype] += size
2752 chunktypesizes[chunktype] += size
2753
2753
2754 # Adjust size min value for empty cases
2754 # Adjust size min value for empty cases
2755 for size in (datasize, fullsize, semisize, deltasize):
2755 for size in (datasize, fullsize, semisize, deltasize):
2756 if size[0] is None:
2756 if size[0] is None:
2757 size[0] = 0
2757 size[0] = 0
2758
2758
2759 numdeltas = numrevs - numfull - numempty - numsemi
2759 numdeltas = numrevs - numfull - numempty - numsemi
2760 numoprev = numprev - nump1prev - nump2prev
2760 numoprev = numprev - nump1prev - nump2prev
2761 totalrawsize = datasize[2]
2761 totalrawsize = datasize[2]
2762 datasize[2] /= numrevs
2762 datasize[2] /= numrevs
2763 fulltotal = fullsize[2]
2763 fulltotal = fullsize[2]
2764 if numfull == 0:
2764 if numfull == 0:
2765 fullsize[2] = 0
2765 fullsize[2] = 0
2766 else:
2766 else:
2767 fullsize[2] /= numfull
2767 fullsize[2] /= numfull
2768 semitotal = semisize[2]
2768 semitotal = semisize[2]
2769 snaptotal = {}
2769 snaptotal = {}
2770 if numsemi > 0:
2770 if numsemi > 0:
2771 semisize[2] /= numsemi
2771 semisize[2] /= numsemi
2772 for depth in snapsizedepth:
2772 for depth in snapsizedepth:
2773 snaptotal[depth] = snapsizedepth[depth][2]
2773 snaptotal[depth] = snapsizedepth[depth][2]
2774 snapsizedepth[depth][2] /= numsnapdepth[depth]
2774 snapsizedepth[depth][2] /= numsnapdepth[depth]
2775
2775
2776 deltatotal = deltasize[2]
2776 deltatotal = deltasize[2]
2777 if numdeltas > 0:
2777 if numdeltas > 0:
2778 deltasize[2] /= numdeltas
2778 deltasize[2] /= numdeltas
2779 totalsize = fulltotal + semitotal + deltatotal
2779 totalsize = fulltotal + semitotal + deltatotal
2780 avgchainlen = sum(chainlengths) / numrevs
2780 avgchainlen = sum(chainlengths) / numrevs
2781 maxchainlen = max(chainlengths)
2781 maxchainlen = max(chainlengths)
2782 maxchainspan = max(chainspans)
2782 maxchainspan = max(chainspans)
2783 compratio = 1
2783 compratio = 1
2784 if totalsize:
2784 if totalsize:
2785 compratio = totalrawsize / totalsize
2785 compratio = totalrawsize / totalsize
2786
2786
2787 basedfmtstr = b'%%%dd\n'
2787 basedfmtstr = b'%%%dd\n'
2788 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2788 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2789
2789
2790 def dfmtstr(max):
2790 def dfmtstr(max):
2791 return basedfmtstr % len(str(max))
2791 return basedfmtstr % len(str(max))
2792
2792
2793 def pcfmtstr(max, padding=0):
2793 def pcfmtstr(max, padding=0):
2794 return basepcfmtstr % (len(str(max)), b' ' * padding)
2794 return basepcfmtstr % (len(str(max)), b' ' * padding)
2795
2795
2796 def pcfmt(value, total):
2796 def pcfmt(value, total):
2797 if total:
2797 if total:
2798 return (value, 100 * float(value) / total)
2798 return (value, 100 * float(value) / total)
2799 else:
2799 else:
2800 return value, 100.0
2800 return value, 100.0
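# Illustrative example (not part of the original source): pcfmt(5, 20)
# returns (5, 25.0), so a format string built by pcfmtstr() renders it
# roughly as "    5 (25.00%)"; dfmtstr() pads plain integers to the same
# width so the columns in the report below line up.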
2801
2801
2802 ui.writenoi18n(b'format : %d\n' % format)
2802 ui.writenoi18n(b'format : %d\n' % format)
2803 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2803 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2804
2804
2805 ui.write(b'\n')
2805 ui.write(b'\n')
2806 fmt = pcfmtstr(totalsize)
2806 fmt = pcfmtstr(totalsize)
2807 fmt2 = dfmtstr(totalsize)
2807 fmt2 = dfmtstr(totalsize)
2808 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2808 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2809 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2809 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2810 ui.writenoi18n(
2810 ui.writenoi18n(
2811 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2811 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2812 )
2812 )
2813 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2813 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2814 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2814 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2815 ui.writenoi18n(
2815 ui.writenoi18n(
2816 b' text : '
2816 b' text : '
2817 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2817 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2818 )
2818 )
2819 ui.writenoi18n(
2819 ui.writenoi18n(
2820 b' delta : '
2820 b' delta : '
2821 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2821 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2822 )
2822 )
2823 ui.writenoi18n(
2823 ui.writenoi18n(
2824 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2824 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2825 )
2825 )
2826 for depth in sorted(numsnapdepth):
2826 for depth in sorted(numsnapdepth):
2827 ui.write(
2827 ui.write(
2828 (b' lvl-%-3d : ' % depth)
2828 (b' lvl-%-3d : ' % depth)
2829 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2829 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2830 )
2830 )
2831 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2831 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2832 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2832 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2833 ui.writenoi18n(
2833 ui.writenoi18n(
2834 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2834 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2835 )
2835 )
2836 for depth in sorted(numsnapdepth):
2836 for depth in sorted(numsnapdepth):
2837 ui.write(
2837 ui.write(
2838 (b' lvl-%-3d : ' % depth)
2838 (b' lvl-%-3d : ' % depth)
2839 + fmt % pcfmt(snaptotal[depth], totalsize)
2839 + fmt % pcfmt(snaptotal[depth], totalsize)
2840 )
2840 )
2841 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2841 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2842
2842
2843 def fmtchunktype(chunktype):
2843 def fmtchunktype(chunktype):
2844 if chunktype == b'empty':
2844 if chunktype == b'empty':
2845 return b' %s : ' % chunktype
2845 return b' %s : ' % chunktype
2846 elif chunktype in pycompat.bytestr(string.ascii_letters):
2846 elif chunktype in pycompat.bytestr(string.ascii_letters):
2847 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2847 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2848 else:
2848 else:
2849 return b' 0x%s : ' % hex(chunktype)
2849 return b' 0x%s : ' % hex(chunktype)
2850
2850
2851 ui.write(b'\n')
2851 ui.write(b'\n')
2852 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2852 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2853 for chunktype in sorted(chunktypecounts):
2853 for chunktype in sorted(chunktypecounts):
2854 ui.write(fmtchunktype(chunktype))
2854 ui.write(fmtchunktype(chunktype))
2855 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2855 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2856 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2856 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2857 for chunktype in sorted(chunktypecounts):
2857 for chunktype in sorted(chunktypecounts):
2858 ui.write(fmtchunktype(chunktype))
2858 ui.write(fmtchunktype(chunktype))
2859 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2859 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2860
2860
2861 ui.write(b'\n')
2861 ui.write(b'\n')
2862 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2862 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2863 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2863 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2864 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2864 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2865 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2865 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2866 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2866 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2867
2867
2868 if format > 0:
2868 if format > 0:
2869 ui.write(b'\n')
2869 ui.write(b'\n')
2870 ui.writenoi18n(
2870 ui.writenoi18n(
2871 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2871 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2872 % tuple(datasize)
2872 % tuple(datasize)
2873 )
2873 )
2874 ui.writenoi18n(
2874 ui.writenoi18n(
2875 b'full revision size (min/max/avg) : %d / %d / %d\n'
2875 b'full revision size (min/max/avg) : %d / %d / %d\n'
2876 % tuple(fullsize)
2876 % tuple(fullsize)
2877 )
2877 )
2878 ui.writenoi18n(
2878 ui.writenoi18n(
2879 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2879 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2880 % tuple(semisize)
2880 % tuple(semisize)
2881 )
2881 )
2882 for depth in sorted(snapsizedepth):
2882 for depth in sorted(snapsizedepth):
2883 if depth == 0:
2883 if depth == 0:
2884 continue
2884 continue
2885 ui.writenoi18n(
2885 ui.writenoi18n(
2886 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2886 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2887 % ((depth,) + tuple(snapsizedepth[depth]))
2887 % ((depth,) + tuple(snapsizedepth[depth]))
2888 )
2888 )
2889 ui.writenoi18n(
2889 ui.writenoi18n(
2890 b'delta size (min/max/avg) : %d / %d / %d\n'
2890 b'delta size (min/max/avg) : %d / %d / %d\n'
2891 % tuple(deltasize)
2891 % tuple(deltasize)
2892 )
2892 )
2893
2893
2894 if numdeltas > 0:
2894 if numdeltas > 0:
2895 ui.write(b'\n')
2895 ui.write(b'\n')
2896 fmt = pcfmtstr(numdeltas)
2896 fmt = pcfmtstr(numdeltas)
2897 fmt2 = pcfmtstr(numdeltas, 4)
2897 fmt2 = pcfmtstr(numdeltas, 4)
2898 ui.writenoi18n(
2898 ui.writenoi18n(
2899 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2899 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2900 )
2900 )
2901 if numprev > 0:
2901 if numprev > 0:
2902 ui.writenoi18n(
2902 ui.writenoi18n(
2903 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2903 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2904 )
2904 )
2905 ui.writenoi18n(
2905 ui.writenoi18n(
2906 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2906 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2907 )
2907 )
2908 ui.writenoi18n(
2908 ui.writenoi18n(
2909 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2909 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2910 )
2910 )
2911 if gdelta:
2911 if gdelta:
2912 ui.writenoi18n(
2912 ui.writenoi18n(
2913 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2913 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2914 )
2914 )
2915 ui.writenoi18n(
2915 ui.writenoi18n(
2916 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2916 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2917 )
2917 )
2918 ui.writenoi18n(
2918 ui.writenoi18n(
2919 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2919 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2920 )
2920 )
2921
2921
2922
2922
2923 @command(
2923 @command(
2924 b'debugrevlogindex',
2924 b'debugrevlogindex',
2925 cmdutil.debugrevlogopts
2925 cmdutil.debugrevlogopts
2926 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2926 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2927 _(b'[-f FORMAT] -c|-m|FILE'),
2927 _(b'[-f FORMAT] -c|-m|FILE'),
2928 optionalrepo=True,
2928 optionalrepo=True,
2929 )
2929 )
2930 def debugrevlogindex(ui, repo, file_=None, **opts):
2930 def debugrevlogindex(ui, repo, file_=None, **opts):
2931 """dump the contents of a revlog index"""
2931 """dump the contents of a revlog index"""
2932 opts = pycompat.byteskwargs(opts)
2932 opts = pycompat.byteskwargs(opts)
2933 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2933 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2934 format = opts.get(b'format', 0)
2934 format = opts.get(b'format', 0)
2935 if format not in (0, 1):
2935 if format not in (0, 1):
2936 raise error.Abort(_(b"unknown format %d") % format)
2936 raise error.Abort(_(b"unknown format %d") % format)
2937
2937
2938 if ui.debugflag:
2938 if ui.debugflag:
2939 shortfn = hex
2939 shortfn = hex
2940 else:
2940 else:
2941 shortfn = short
2941 shortfn = short
2942
2942
2943 # There might not be anything in r, so have a sane default
2943 # There might not be anything in r, so have a sane default
2944 idlen = 12
2944 idlen = 12
2945 for i in r:
2945 for i in r:
2946 idlen = len(shortfn(r.node(i)))
2946 idlen = len(shortfn(r.node(i)))
2947 break
2947 break
2948
2948
2949 if format == 0:
2949 if format == 0:
2950 if ui.verbose:
2950 if ui.verbose:
2951 ui.writenoi18n(
2951 ui.writenoi18n(
2952 b" rev offset length linkrev %s %s p2\n"
2952 b" rev offset length linkrev %s %s p2\n"
2953 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2953 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2954 )
2954 )
2955 else:
2955 else:
2956 ui.writenoi18n(
2956 ui.writenoi18n(
2957 b" rev linkrev %s %s p2\n"
2957 b" rev linkrev %s %s p2\n"
2958 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2958 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2959 )
2959 )
2960 elif format == 1:
2960 elif format == 1:
2961 if ui.verbose:
2961 if ui.verbose:
2962 ui.writenoi18n(
2962 ui.writenoi18n(
2963 (
2963 (
2964 b" rev flag offset length size link p1"
2964 b" rev flag offset length size link p1"
2965 b" p2 %s\n"
2965 b" p2 %s\n"
2966 )
2966 )
2967 % b"nodeid".rjust(idlen)
2967 % b"nodeid".rjust(idlen)
2968 )
2968 )
2969 else:
2969 else:
2970 ui.writenoi18n(
2970 ui.writenoi18n(
2971 b" rev flag size link p1 p2 %s\n"
2971 b" rev flag size link p1 p2 %s\n"
2972 % b"nodeid".rjust(idlen)
2972 % b"nodeid".rjust(idlen)
2973 )
2973 )
2974
2974
2975 for i in r:
2975 for i in r:
2976 node = r.node(i)
2976 node = r.node(i)
2977 if format == 0:
2977 if format == 0:
2978 try:
2978 try:
2979 pp = r.parents(node)
2979 pp = r.parents(node)
2980 except Exception:
2980 except Exception:
2981 pp = [nullid, nullid]
2981 pp = [nullid, nullid]
2982 if ui.verbose:
2982 if ui.verbose:
2983 ui.write(
2983 ui.write(
2984 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2984 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2985 % (
2985 % (
2986 i,
2986 i,
2987 r.start(i),
2987 r.start(i),
2988 r.length(i),
2988 r.length(i),
2989 r.linkrev(i),
2989 r.linkrev(i),
2990 shortfn(node),
2990 shortfn(node),
2991 shortfn(pp[0]),
2991 shortfn(pp[0]),
2992 shortfn(pp[1]),
2992 shortfn(pp[1]),
2993 )
2993 )
2994 )
2994 )
2995 else:
2995 else:
2996 ui.write(
2996 ui.write(
2997 b"% 6d % 7d %s %s %s\n"
2997 b"% 6d % 7d %s %s %s\n"
2998 % (
2998 % (
2999 i,
2999 i,
3000 r.linkrev(i),
3000 r.linkrev(i),
3001 shortfn(node),
3001 shortfn(node),
3002 shortfn(pp[0]),
3002 shortfn(pp[0]),
3003 shortfn(pp[1]),
3003 shortfn(pp[1]),
3004 )
3004 )
3005 )
3005 )
3006 elif format == 1:
3006 elif format == 1:
3007 pr = r.parentrevs(i)
3007 pr = r.parentrevs(i)
3008 if ui.verbose:
3008 if ui.verbose:
3009 ui.write(
3009 ui.write(
3010 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3010 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3011 % (
3011 % (
3012 i,
3012 i,
3013 r.flags(i),
3013 r.flags(i),
3014 r.start(i),
3014 r.start(i),
3015 r.length(i),
3015 r.length(i),
3016 r.rawsize(i),
3016 r.rawsize(i),
3017 r.linkrev(i),
3017 r.linkrev(i),
3018 pr[0],
3018 pr[0],
3019 pr[1],
3019 pr[1],
3020 shortfn(node),
3020 shortfn(node),
3021 )
3021 )
3022 )
3022 )
3023 else:
3023 else:
3024 ui.write(
3024 ui.write(
3025 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3025 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3026 % (
3026 % (
3027 i,
3027 i,
3028 r.flags(i),
3028 r.flags(i),
3029 r.rawsize(i),
3029 r.rawsize(i),
3030 r.linkrev(i),
3030 r.linkrev(i),
3031 pr[0],
3031 pr[0],
3032 pr[1],
3032 pr[1],
3033 shortfn(node),
3033 shortfn(node),
3034 )
3034 )
3035 )
3035 )
3036
3036
3037
3037
3038 @command(
3038 @command(
3039 b'debugrevspec',
3039 b'debugrevspec',
3040 [
3040 [
3041 (
3041 (
3042 b'',
3042 b'',
3043 b'optimize',
3043 b'optimize',
3044 None,
3044 None,
3045 _(b'print parsed tree after optimizing (DEPRECATED)'),
3045 _(b'print parsed tree after optimizing (DEPRECATED)'),
3046 ),
3046 ),
3047 (
3047 (
3048 b'',
3048 b'',
3049 b'show-revs',
3049 b'show-revs',
3050 True,
3050 True,
3051 _(b'print list of result revisions (default)'),
3051 _(b'print list of result revisions (default)'),
3052 ),
3052 ),
3053 (
3053 (
3054 b's',
3054 b's',
3055 b'show-set',
3055 b'show-set',
3056 None,
3056 None,
3057 _(b'print internal representation of result set'),
3057 _(b'print internal representation of result set'),
3058 ),
3058 ),
3059 (
3059 (
3060 b'p',
3060 b'p',
3061 b'show-stage',
3061 b'show-stage',
3062 [],
3062 [],
3063 _(b'print parsed tree at the given stage'),
3063 _(b'print parsed tree at the given stage'),
3064 _(b'NAME'),
3064 _(b'NAME'),
3065 ),
3065 ),
3066 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3066 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3067 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3067 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3068 ],
3068 ],
3069 b'REVSPEC',
3069 b'REVSPEC',
3070 )
3070 )
3071 def debugrevspec(ui, repo, expr, **opts):
3071 def debugrevspec(ui, repo, expr, **opts):
3072 """parse and apply a revision specification
3072 """parse and apply a revision specification
3073
3073
3074 Use the -p/--show-stage option to print the parsed tree at the given stages.
3074 Use the -p/--show-stage option to print the parsed tree at the given stages.
3075 Use -p all to print the tree at every stage.
3075 Use -p all to print the tree at every stage.
3076
3076
3077 Use --no-show-revs option with -s or -p to print only the set
3077 Use --no-show-revs option with -s or -p to print only the set
3078 representation or the parsed tree respectively.
3078 representation or the parsed tree respectively.
3079
3079
3080 Use --verify-optimized to compare the optimized result with the unoptimized
3080 Use --verify-optimized to compare the optimized result with the unoptimized
3081 one. Returns 1 if the optimized result differs.
3081 one. Returns 1 if the optimized result differs.
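
For example, a hypothetical invocation that prints the parsed tree at every
stage together with the resulting revisions::

  hg debugrevspec -p all "parents(tip)"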
3082 """
3082 """
3083 opts = pycompat.byteskwargs(opts)
3083 opts = pycompat.byteskwargs(opts)
3084 aliases = ui.configitems(b'revsetalias')
3084 aliases = ui.configitems(b'revsetalias')
3085 stages = [
3085 stages = [
3086 (b'parsed', lambda tree: tree),
3086 (b'parsed', lambda tree: tree),
3087 (
3087 (
3088 b'expanded',
3088 b'expanded',
3089 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3089 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3090 ),
3090 ),
3091 (b'concatenated', revsetlang.foldconcat),
3091 (b'concatenated', revsetlang.foldconcat),
3092 (b'analyzed', revsetlang.analyze),
3092 (b'analyzed', revsetlang.analyze),
3093 (b'optimized', revsetlang.optimize),
3093 (b'optimized', revsetlang.optimize),
3094 ]
3094 ]
3095 if opts[b'no_optimized']:
3095 if opts[b'no_optimized']:
3096 stages = stages[:-1]
3096 stages = stages[:-1]
3097 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3097 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3098 raise error.Abort(
3098 raise error.Abort(
3099 _(b'cannot use --verify-optimized with --no-optimized')
3099 _(b'cannot use --verify-optimized with --no-optimized')
3100 )
3100 )
3101 stagenames = set(n for n, f in stages)
3101 stagenames = set(n for n, f in stages)
3102
3102
3103 showalways = set()
3103 showalways = set()
3104 showchanged = set()
3104 showchanged = set()
3105 if ui.verbose and not opts[b'show_stage']:
3105 if ui.verbose and not opts[b'show_stage']:
3106 # show parsed tree by --verbose (deprecated)
3106 # show parsed tree by --verbose (deprecated)
3107 showalways.add(b'parsed')
3107 showalways.add(b'parsed')
3108 showchanged.update([b'expanded', b'concatenated'])
3108 showchanged.update([b'expanded', b'concatenated'])
3109 if opts[b'optimize']:
3109 if opts[b'optimize']:
3110 showalways.add(b'optimized')
3110 showalways.add(b'optimized')
3111 if opts[b'show_stage'] and opts[b'optimize']:
3111 if opts[b'show_stage'] and opts[b'optimize']:
3112 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3112 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3113 if opts[b'show_stage'] == [b'all']:
3113 if opts[b'show_stage'] == [b'all']:
3114 showalways.update(stagenames)
3114 showalways.update(stagenames)
3115 else:
3115 else:
3116 for n in opts[b'show_stage']:
3116 for n in opts[b'show_stage']:
3117 if n not in stagenames:
3117 if n not in stagenames:
3118 raise error.Abort(_(b'invalid stage name: %s') % n)
3118 raise error.Abort(_(b'invalid stage name: %s') % n)
3119 showalways.update(opts[b'show_stage'])
3119 showalways.update(opts[b'show_stage'])
3120
3120
3121 treebystage = {}
3121 treebystage = {}
3122 printedtree = None
3122 printedtree = None
3123 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3123 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3124 for n, f in stages:
3124 for n, f in stages:
3125 treebystage[n] = tree = f(tree)
3125 treebystage[n] = tree = f(tree)
3126 if n in showalways or (n in showchanged and tree != printedtree):
3126 if n in showalways or (n in showchanged and tree != printedtree):
3127 if opts[b'show_stage'] or n != b'parsed':
3127 if opts[b'show_stage'] or n != b'parsed':
3128 ui.write(b"* %s:\n" % n)
3128 ui.write(b"* %s:\n" % n)
3129 ui.write(revsetlang.prettyformat(tree), b"\n")
3129 ui.write(revsetlang.prettyformat(tree), b"\n")
3130 printedtree = tree
3130 printedtree = tree
3131
3131
3132 if opts[b'verify_optimized']:
3132 if opts[b'verify_optimized']:
3133 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3133 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3134 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3134 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3135 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3135 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3136 ui.writenoi18n(
3136 ui.writenoi18n(
3137 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3137 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3138 )
3138 )
3139 ui.writenoi18n(
3139 ui.writenoi18n(
3140 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3140 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3141 )
3141 )
3142 arevs = list(arevs)
3142 arevs = list(arevs)
3143 brevs = list(brevs)
3143 brevs = list(brevs)
3144 if arevs == brevs:
3144 if arevs == brevs:
3145 return 0
3145 return 0
3146 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3146 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3147 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3147 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3148 sm = difflib.SequenceMatcher(None, arevs, brevs)
3148 sm = difflib.SequenceMatcher(None, arevs, brevs)
3149 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3149 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3150 if tag in (r'delete', r'replace'):
3150 if tag in (r'delete', r'replace'):
3151 for c in arevs[alo:ahi]:
3151 for c in arevs[alo:ahi]:
3152 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3152 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3153 if tag in (r'insert', r'replace'):
3153 if tag in (r'insert', r'replace'):
3154 for c in brevs[blo:bhi]:
3154 for c in brevs[blo:bhi]:
3155 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3155 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3156 if tag == r'equal':
3156 if tag == r'equal':
3157 for c in arevs[alo:ahi]:
3157 for c in arevs[alo:ahi]:
3158 ui.write(b' %d\n' % c)
3158 ui.write(b' %d\n' % c)
3159 return 1
3159 return 1
3160
3160
3161 func = revset.makematcher(tree)
3161 func = revset.makematcher(tree)
3162 revs = func(repo)
3162 revs = func(repo)
3163 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3163 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3164 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3164 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3165 if not opts[b'show_revs']:
3165 if not opts[b'show_revs']:
3166 return
3166 return
3167 for c in revs:
3167 for c in revs:
3168 ui.write(b"%d\n" % c)
3168 ui.write(b"%d\n" % c)
3169
3169
3170
3170
3171 @command(
3171 @command(
3172 b'debugserve',
3172 b'debugserve',
3173 [
3173 [
3174 (
3174 (
3175 b'',
3175 b'',
3176 b'sshstdio',
3176 b'sshstdio',
3177 False,
3177 False,
3178 _(b'run an SSH server bound to process handles'),
3178 _(b'run an SSH server bound to process handles'),
3179 ),
3179 ),
3180 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3180 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3181 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3181 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3182 ],
3182 ],
3183 b'',
3183 b'',
3184 )
3184 )
3185 def debugserve(ui, repo, **opts):
3185 def debugserve(ui, repo, **opts):
3186 """run a server with advanced settings
3186 """run a server with advanced settings
3187
3187
3188 This command is similar to :hg:`serve`. It exists partially as a
3188 This command is similar to :hg:`serve`. It exists partially as a
3189 workaround to the fact that ``hg serve --stdio`` must have specific
3189 workaround to the fact that ``hg serve --stdio`` must have specific
3190 arguments for security reasons.
3190 arguments for security reasons.
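
A hypothetical invocation that serves the current repository over the
process's stdin/stdout and logs the exchanged data to a file::

  hg -R /path/to/repo debugserve --sshstdio --logiofile /tmp/hg-serve.log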
3191 """
3191 """
3192 opts = pycompat.byteskwargs(opts)
3192 opts = pycompat.byteskwargs(opts)
3193
3193
3194 if not opts[b'sshstdio']:
3194 if not opts[b'sshstdio']:
3195 raise error.Abort(_(b'only --sshstdio is currently supported'))
3195 raise error.Abort(_(b'only --sshstdio is currently supported'))
3196
3196
3197 logfh = None
3197 logfh = None
3198
3198
3199 if opts[b'logiofd'] and opts[b'logiofile']:
3199 if opts[b'logiofd'] and opts[b'logiofile']:
3200 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3200 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3201
3201
3202 if opts[b'logiofd']:
3202 if opts[b'logiofd']:
3203 # Line buffered because output is line based.
3203 # Line buffered because output is line based.
3204 try:
3204 try:
3205 logfh = os.fdopen(int(opts[b'logiofd']), r'ab', 1)
3205 logfh = os.fdopen(int(opts[b'logiofd']), r'ab', 1)
3206 except OSError as e:
3206 except OSError as e:
3207 if e.errno != errno.ESPIPE:
3207 if e.errno != errno.ESPIPE:
3208 raise
3208 raise
3209 # can't seek a pipe, so `ab` mode fails on py3
3209 # can't seek a pipe, so `ab` mode fails on py3
3210 logfh = os.fdopen(int(opts[b'logiofd']), r'wb', 1)
3210 logfh = os.fdopen(int(opts[b'logiofd']), r'wb', 1)
3211 elif opts[b'logiofile']:
3211 elif opts[b'logiofile']:
3212 logfh = open(opts[b'logiofile'], b'ab', 1)
3212 logfh = open(opts[b'logiofile'], b'ab', 1)
3213
3213
3214 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3214 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3215 s.serve_forever()
3215 s.serve_forever()
3216
3216
3217
3217
3218 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3218 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3219 def debugsetparents(ui, repo, rev1, rev2=None):
3219 def debugsetparents(ui, repo, rev1, rev2=None):
3220 """manually set the parents of the current working directory
3220 """manually set the parents of the current working directory
3221
3221
3222 This is useful for writing repository conversion tools, but should
3222 This is useful for writing repository conversion tools, but should
3223 be used with care. For example, neither the working directory nor the
3223 be used with care. For example, neither the working directory nor the
3224 dirstate is updated, so file status may be incorrect after running this
3224 dirstate is updated, so file status may be incorrect after running this
3225 command.
3225 command.
3226
3226
3227 Returns 0 on success.
3227 Returns 0 on success.
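
For example, a hypothetical call that records REV as the sole parent of the
working directory (the second parent defaults to the null revision)::

  hg debugsetparents REV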
3228 """
3228 """
3229
3229
3230 node1 = scmutil.revsingle(repo, rev1).node()
3230 node1 = scmutil.revsingle(repo, rev1).node()
3231 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3231 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3232
3232
3233 with repo.wlock():
3233 with repo.wlock():
3234 repo.setparents(node1, node2)
3234 repo.setparents(node1, node2)
3235
3235
3236
3236
3237 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3237 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3238 def debugsidedata(ui, repo, file_, rev=None, **opts):
3238 def debugsidedata(ui, repo, file_, rev=None, **opts):
3239 """dump the side data for a cl/manifest/file revision"""
3239 """dump the side data for a cl/manifest/file revision
3240
3241 Use --verbose to dump the sidedata content."""
3240 opts = pycompat.byteskwargs(opts)
3242 opts = pycompat.byteskwargs(opts)
3241 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3243 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3242 if rev is not None:
3244 if rev is not None:
3243 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3245 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3244 file_, rev = None, file_
3246 file_, rev = None, file_
3245 elif rev is None:
3247 elif rev is None:
3246 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3248 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3247 r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3249 r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3248 r = getattr(r, '_revlog', r)
3250 r = getattr(r, '_revlog', r)
3249 try:
3251 try:
3250 sidedata = r.sidedata(r.lookup(rev))
3252 sidedata = r.sidedata(r.lookup(rev))
3251 except KeyError:
3253 except KeyError:
3252 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3254 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3253 if sidedata:
3255 if sidedata:
3254 sidedata = list(sidedata.items())
3256 sidedata = list(sidedata.items())
3255 sidedata.sort()
3257 sidedata.sort()
3256 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3258 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3257 for key, value in sidedata:
3259 for key, value in sidedata:
3258 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3260 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3259 if ui.verbose:
3261 if ui.verbose:
3260 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3262 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3261
3263
3262
3264
3263 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3265 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3264 def debugssl(ui, repo, source=None, **opts):
3266 def debugssl(ui, repo, source=None, **opts):
3265 '''test a secure connection to a server
3267 '''test a secure connection to a server
3266
3268
3267 This builds the certificate chain for the server on Windows, installing the
3269 This builds the certificate chain for the server on Windows, installing the
3268 missing intermediates and trusted root via Windows Update if necessary. It
3270 missing intermediates and trusted root via Windows Update if necessary. It
3269 does nothing on other platforms.
3271 does nothing on other platforms.
3270
3272
3271 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3273 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3272 that server is used. See :hg:`help urls` for more information.
3274 that server is used. See :hg:`help urls` for more information.
3273
3275
3274 If the update succeeds, retry the original operation. Otherwise, the cause
3276 If the update succeeds, retry the original operation. Otherwise, the cause
3275 of the SSL error is likely another issue.
3277 of the SSL error is likely another issue.
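
For example, a hypothetical check against a specific HTTPS server::

  hg debugssl https://hg.example.com/repo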
3276 '''
3278 '''
3277 if not pycompat.iswindows:
3279 if not pycompat.iswindows:
3278 raise error.Abort(
3280 raise error.Abort(
3279 _(b'certificate chain building is only possible on Windows')
3281 _(b'certificate chain building is only possible on Windows')
3280 )
3282 )
3281
3283
3282 if not source:
3284 if not source:
3283 if not repo:
3285 if not repo:
3284 raise error.Abort(
3286 raise error.Abort(
3285 _(
3287 _(
3286 b"there is no Mercurial repository here, and no "
3288 b"there is no Mercurial repository here, and no "
3287 b"server specified"
3289 b"server specified"
3288 )
3290 )
3289 )
3291 )
3290 source = b"default"
3292 source = b"default"
3291
3293
3292 source, branches = hg.parseurl(ui.expandpath(source))
3294 source, branches = hg.parseurl(ui.expandpath(source))
3293 url = util.url(source)
3295 url = util.url(source)
3294
3296
3295 defaultport = {b'https': 443, b'ssh': 22}
3297 defaultport = {b'https': 443, b'ssh': 22}
3296 if url.scheme in defaultport:
3298 if url.scheme in defaultport:
3297 try:
3299 try:
3298 addr = (url.host, int(url.port or defaultport[url.scheme]))
3300 addr = (url.host, int(url.port or defaultport[url.scheme]))
3299 except ValueError:
3301 except ValueError:
3300 raise error.Abort(_(b"malformed port number in URL"))
3302 raise error.Abort(_(b"malformed port number in URL"))
3301 else:
3303 else:
3302 raise error.Abort(_(b"only https and ssh connections are supported"))
3304 raise error.Abort(_(b"only https and ssh connections are supported"))
3303
3305
3304 from . import win32
3306 from . import win32
3305
3307
3306 s = ssl.wrap_socket(
3308 s = ssl.wrap_socket(
3307 socket.socket(),
3309 socket.socket(),
3308 ssl_version=ssl.PROTOCOL_TLS,
3310 ssl_version=ssl.PROTOCOL_TLS,
3309 cert_reqs=ssl.CERT_NONE,
3311 cert_reqs=ssl.CERT_NONE,
3310 ca_certs=None,
3312 ca_certs=None,
3311 )
3313 )
3312
3314
3313 try:
3315 try:
3314 s.connect(addr)
3316 s.connect(addr)
3315 cert = s.getpeercert(True)
3317 cert = s.getpeercert(True)
3316
3318
3317 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3319 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3318
3320
3319 complete = win32.checkcertificatechain(cert, build=False)
3321 complete = win32.checkcertificatechain(cert, build=False)
3320
3322
3321 if not complete:
3323 if not complete:
3322 ui.status(_(b'certificate chain is incomplete, updating... '))
3324 ui.status(_(b'certificate chain is incomplete, updating... '))
3323
3325
3324 if not win32.checkcertificatechain(cert):
3326 if not win32.checkcertificatechain(cert):
3325 ui.status(_(b'failed.\n'))
3327 ui.status(_(b'failed.\n'))
3326 else:
3328 else:
3327 ui.status(_(b'done.\n'))
3329 ui.status(_(b'done.\n'))
3328 else:
3330 else:
3329 ui.status(_(b'full certificate chain is available\n'))
3331 ui.status(_(b'full certificate chain is available\n'))
3330 finally:
3332 finally:
3331 s.close()
3333 s.close()
3332
3334
3333
3335
3334 @command(
3336 @command(
3335 b'debugsub',
3337 b'debugsub',
3336 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3338 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3337 _(b'[-r REV] [REV]'),
3339 _(b'[-r REV] [REV]'),
3338 )
3340 )
3339 def debugsub(ui, repo, rev=None):
3341 def debugsub(ui, repo, rev=None):
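"""show the subrepository state recorded for a revision

For each subrepository known to the given revision (or to the working
directory when no revision is given), print its path together with the
recorded source and revision.
"""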
3340 ctx = scmutil.revsingle(repo, rev, None)
3342 ctx = scmutil.revsingle(repo, rev, None)
3341 for k, v in sorted(ctx.substate.items()):
3343 for k, v in sorted(ctx.substate.items()):
3342 ui.writenoi18n(b'path %s\n' % k)
3344 ui.writenoi18n(b'path %s\n' % k)
3343 ui.writenoi18n(b' source %s\n' % v[0])
3345 ui.writenoi18n(b' source %s\n' % v[0])
3344 ui.writenoi18n(b' revision %s\n' % v[1])
3346 ui.writenoi18n(b' revision %s\n' % v[1])
3345
3347
3346
3348
3347 @command(
3349 @command(
3348 b'debugsuccessorssets',
3350 b'debugsuccessorssets',
3349 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3351 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3350 _(b'[REV]'),
3352 _(b'[REV]'),
3351 )
3353 )
3352 def debugsuccessorssets(ui, repo, *revs, **opts):
3354 def debugsuccessorssets(ui, repo, *revs, **opts):
3353 """show set of successors for revision
3355 """show set of successors for revision
3354
3356
3355 A successors set of changeset A is a consistent group of revisions that
3357 A successors set of changeset A is a consistent group of revisions that
3356 succeed A. It contains non-obsolete changesets only unless the closest
3358 succeed A. It contains non-obsolete changesets only unless the closest
3357 successors sets are requested.
3359 successors sets are requested.
3358
3360
3359 In most cases a changeset A has a single successors set containing a single
3361 In most cases a changeset A has a single successors set containing a single
3360 successor (changeset A replaced by A').
3362 successor (changeset A replaced by A').
3361
3363
3362 A changeset that is made obsolete with no successors is called "pruned".
3364 A changeset that is made obsolete with no successors is called "pruned".
3363 Such changesets have no successors sets at all.
3365 Such changesets have no successors sets at all.
3364
3366
3365 A changeset that has been "split" will have a successors set containing
3367 A changeset that has been "split" will have a successors set containing
3366 more than one successor.
3368 more than one successor.
3367
3369
3368 A changeset that has been rewritten in multiple different ways is called
3370 A changeset that has been rewritten in multiple different ways is called
3369 "divergent". Such changesets have multiple successor sets (each of which
3371 "divergent". Such changesets have multiple successor sets (each of which
3370 may also be split, i.e. have multiple successors).
3372 may also be split, i.e. have multiple successors).
3371
3373
3372 Results are displayed as follows::
3374 Results are displayed as follows::
3373
3375
3374 <rev1>
3376 <rev1>
3375 <successors-1A>
3377 <successors-1A>
3376 <rev2>
3378 <rev2>
3377 <successors-2A>
3379 <successors-2A>
3378 <successors-2B1> <successors-2B2> <successors-2B3>
3380 <successors-2B1> <successors-2B2> <successors-2B3>
3379
3381
3380 Here rev2 has two possible (i.e. divergent) successors sets. The first
3382 Here rev2 has two possible (i.e. divergent) successors sets. The first
3381 holds one element, whereas the second holds three (i.e. the changeset has
3383 holds one element, whereas the second holds three (i.e. the changeset has
3382 been split).
3384 been split).
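
For example, a hypothetical query over all obsolete changesets, keeping only
the closest successors sets::

  hg debugsuccessorssets --closest 'obsolete()'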
3383 """
3385 """
3384 # passed to successorssets caching computation from one call to another
3386 # passed to successorssets caching computation from one call to another
3385 cache = {}
3387 cache = {}
3386 ctx2str = bytes
3388 ctx2str = bytes
3387 node2str = short
3389 node2str = short
3388 for rev in scmutil.revrange(repo, revs):
3390 for rev in scmutil.revrange(repo, revs):
3389 ctx = repo[rev]
3391 ctx = repo[rev]
3390 ui.write(b'%s\n' % ctx2str(ctx))
3392 ui.write(b'%s\n' % ctx2str(ctx))
3391 for succsset in obsutil.successorssets(
3393 for succsset in obsutil.successorssets(
3392 repo, ctx.node(), closest=opts[r'closest'], cache=cache
3394 repo, ctx.node(), closest=opts[r'closest'], cache=cache
3393 ):
3395 ):
3394 if succsset:
3396 if succsset:
3395 ui.write(b' ')
3397 ui.write(b' ')
3396 ui.write(node2str(succsset[0]))
3398 ui.write(node2str(succsset[0]))
3397 for node in succsset[1:]:
3399 for node in succsset[1:]:
3398 ui.write(b' ')
3400 ui.write(b' ')
3399 ui.write(node2str(node))
3401 ui.write(node2str(node))
3400 ui.write(b'\n')
3402 ui.write(b'\n')
3401
3403
3402
3404
3403 @command(
3405 @command(
3404 b'debugtemplate',
3406 b'debugtemplate',
3405 [
3407 [
3406 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3408 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3407 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3409 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3408 ],
3410 ],
3409 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3411 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3410 optionalrepo=True,
3412 optionalrepo=True,
3411 )
3413 )
3412 def debugtemplate(ui, repo, tmpl, **opts):
3414 def debugtemplate(ui, repo, tmpl, **opts):
3413 """parse and apply a template
3415 """parse and apply a template
3414
3416
3415 If -r/--rev is given, the template is processed as a log template and
3417 If -r/--rev is given, the template is processed as a log template and
3416 applied to the given changesets. Otherwise, it is processed as a generic
3418 applied to the given changesets. Otherwise, it is processed as a generic
3417 template.
3419 template.
3418
3420
3419 Use --verbose to print the parsed tree.
3421 Use --verbose to print the parsed tree.
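
For example, a hypothetical invocation that renders a log-style template for
the working directory parent, with an extra keyword defined on the command
line::

  hg debugtemplate -r . -D greeting=hello '{greeting} {rev}:{node|short}\n'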
3420 """
3422 """
3421 revs = None
3423 revs = None
3422 if opts[r'rev']:
3424 if opts[r'rev']:
3423 if repo is None:
3425 if repo is None:
3424 raise error.RepoError(
3426 raise error.RepoError(
3425 _(b'there is no Mercurial repository here (.hg not found)')
3427 _(b'there is no Mercurial repository here (.hg not found)')
3426 )
3428 )
3427 revs = scmutil.revrange(repo, opts[r'rev'])
3429 revs = scmutil.revrange(repo, opts[r'rev'])
3428
3430
3429 props = {}
3431 props = {}
3430 for d in opts[r'define']:
3432 for d in opts[r'define']:
3431 try:
3433 try:
3432 k, v = (e.strip() for e in d.split(b'=', 1))
3434 k, v = (e.strip() for e in d.split(b'=', 1))
3433 if not k or k == b'ui':
3435 if not k or k == b'ui':
3434 raise ValueError
3436 raise ValueError
3435 props[k] = v
3437 props[k] = v
3436 except ValueError:
3438 except ValueError:
3437 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3439 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3438
3440
3439 if ui.verbose:
3441 if ui.verbose:
3440 aliases = ui.configitems(b'templatealias')
3442 aliases = ui.configitems(b'templatealias')
3441 tree = templater.parse(tmpl)
3443 tree = templater.parse(tmpl)
3442 ui.note(templater.prettyformat(tree), b'\n')
3444 ui.note(templater.prettyformat(tree), b'\n')
3443 newtree = templater.expandaliases(tree, aliases)
3445 newtree = templater.expandaliases(tree, aliases)
3444 if newtree != tree:
3446 if newtree != tree:
3445 ui.notenoi18n(
3447 ui.notenoi18n(
3446 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3448 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3447 )
3449 )
3448
3450
3449 if revs is None:
3451 if revs is None:
3450 tres = formatter.templateresources(ui, repo)
3452 tres = formatter.templateresources(ui, repo)
3451 t = formatter.maketemplater(ui, tmpl, resources=tres)
3453 t = formatter.maketemplater(ui, tmpl, resources=tres)
3452 if ui.verbose:
3454 if ui.verbose:
3453 kwds, funcs = t.symbolsuseddefault()
3455 kwds, funcs = t.symbolsuseddefault()
3454 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3456 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3455 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3457 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3456 ui.write(t.renderdefault(props))
3458 ui.write(t.renderdefault(props))
3457 else:
3459 else:
3458 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3460 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3459 if ui.verbose:
3461 if ui.verbose:
3460 kwds, funcs = displayer.t.symbolsuseddefault()
3462 kwds, funcs = displayer.t.symbolsuseddefault()
3461 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3463 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3462 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3464 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3463 for r in revs:
3465 for r in revs:
3464 displayer.show(repo[r], **pycompat.strkwargs(props))
3466 displayer.show(repo[r], **pycompat.strkwargs(props))
3465 displayer.close()
3467 displayer.close()
3466
3468
3467
3469
3468 @command(
3470 @command(
3469 b'debuguigetpass',
3471 b'debuguigetpass',
3470 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3472 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3471 _(b'[-p TEXT]'),
3473 _(b'[-p TEXT]'),
3472 norepo=True,
3474 norepo=True,
3473 )
3475 )
3474 def debuguigetpass(ui, prompt=b''):
3476 def debuguigetpass(ui, prompt=b''):
3475 """show prompt to type password"""
3477 """show prompt to type password"""
3476 r = ui.getpass(prompt)
3478 r = ui.getpass(prompt)
3477 ui.writenoi18n(b'response: %s\n' % r)
3479 ui.writenoi18n(b'response: %s\n' % r)
3478
3480
3479
3481
3480 @command(
3482 @command(
3481 b'debuguiprompt',
3483 b'debuguiprompt',
3482 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3484 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3483 _(b'[-p TEXT]'),
3485 _(b'[-p TEXT]'),
3484 norepo=True,
3486 norepo=True,
3485 )
3487 )
3486 def debuguiprompt(ui, prompt=b''):
3488 def debuguiprompt(ui, prompt=b''):
3487 """show plain prompt"""
3489 """show plain prompt"""
3488 r = ui.prompt(prompt)
3490 r = ui.prompt(prompt)
3489 ui.writenoi18n(b'response: %s\n' % r)
3491 ui.writenoi18n(b'response: %s\n' % r)
3490
3492
3491
3493
3492 @command(b'debugupdatecaches', [])
3494 @command(b'debugupdatecaches', [])
3493 def debugupdatecaches(ui, repo, *pats, **opts):
3495 def debugupdatecaches(ui, repo, *pats, **opts):
3494 """warm all known caches in the repository"""
3496 """warm all known caches in the repository"""
3495 with repo.wlock(), repo.lock():
3497 with repo.wlock(), repo.lock():
3496 repo.updatecaches(full=True)
3498 repo.updatecaches(full=True)
3497
3499
3498
3500
3499 @command(
3501 @command(
3500 b'debugupgraderepo',
3502 b'debugupgraderepo',
3501 [
3503 [
3502 (
3504 (
3503 b'o',
3505 b'o',
3504 b'optimize',
3506 b'optimize',
3505 [],
3507 [],
3506 _(b'extra optimization to perform'),
3508 _(b'extra optimization to perform'),
3507 _(b'NAME'),
3509 _(b'NAME'),
3508 ),
3510 ),
3509 (b'', b'run', False, _(b'performs an upgrade')),
3511 (b'', b'run', False, _(b'performs an upgrade')),
3510 (b'', b'backup', True, _(b'keep the old repository content around')),
3512 (b'', b'backup', True, _(b'keep the old repository content around')),
3511 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3513 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3512 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3514 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3513 ],
3515 ],
3514 )
3516 )
3515 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3517 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3516 """upgrade a repository to use different features
3518 """upgrade a repository to use different features
3517
3519
3518 If no arguments are specified, the repository is evaluated for upgrade
3520 If no arguments are specified, the repository is evaluated for upgrade
3519 and a list of problems and potential optimizations is printed.
3521 and a list of problems and potential optimizations is printed.
3520
3522
3521 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3523 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3522 can be influenced via additional arguments. More details will be provided
3524 can be influenced via additional arguments. More details will be provided
3523 by the command output when run without ``--run``.
3525 by the command output when run without ``--run``.
3524
3526
3525 During the upgrade, the repository will be locked and no writes will be
3527 During the upgrade, the repository will be locked and no writes will be
3526 allowed.
3528 allowed.
3527
3529
3528 At the end of the upgrade, the repository may not be readable while new
3530 At the end of the upgrade, the repository may not be readable while new
3529 repository data is swapped in. This window will be as long as it takes to
3531 repository data is swapped in. This window will be as long as it takes to
3530 rename some directories inside the ``.hg`` directory. On most machines, this
3532 rename some directories inside the ``.hg`` directory. On most machines, this
3531 should complete almost instantaneously and the chances of a consumer being
3533 should complete almost instantaneously and the chances of a consumer being
3532 unable to access the repository should be low.
3534 unable to access the repository should be low.
3533
3535
3534 By default, all revlogs will be upgraded. You can restrict this using flags
3536 By default, all revlogs will be upgraded. You can restrict this using flags
3535 such as `--manifest` (see the example after this list):
3537 such as `--manifest` (see the example after this list):
3536
3538
3537 * `--manifest`: only optimize the manifest
3539 * `--manifest`: only optimize the manifest
3538 * `--no-manifest`: optimize all revlogs but the manifest
3540 * `--no-manifest`: optimize all revlogs but the manifest
3539 * `--changelog`: optimize the changelog only
3541 * `--changelog`: optimize the changelog only
3540 * `--no-changelog --no-manifest`: optimize filelogs only
3542 * `--no-changelog --no-manifest`: optimize filelogs only
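
For instance, a hypothetical run that only touches the filelogs::

  hg debugupgraderepo --run --no-changelog --no-manifest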
3541 """
3543 """
3542 return upgrade.upgraderepo(
3544 return upgrade.upgraderepo(
3543 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3545 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3544 )
3546 )
3545
3547
3546
3548
3547 @command(
3549 @command(
3548 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3550 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3549 )
3551 )
3550 def debugwalk(ui, repo, *pats, **opts):
3552 def debugwalk(ui, repo, *pats, **opts):
3551 """show how files match on given patterns"""
3553 """show how files match on given patterns"""
3552 opts = pycompat.byteskwargs(opts)
3554 opts = pycompat.byteskwargs(opts)
3553 m = scmutil.match(repo[None], pats, opts)
3555 m = scmutil.match(repo[None], pats, opts)
3554 if ui.verbose:
3556 if ui.verbose:
3555 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3557 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3556 items = list(repo[None].walk(m))
3558 items = list(repo[None].walk(m))
3557 if not items:
3559 if not items:
3558 return
3560 return
3559 f = lambda fn: fn
3561 f = lambda fn: fn
3560 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3562 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3561 f = lambda fn: util.normpath(fn)
3563 f = lambda fn: util.normpath(fn)
3562 fmt = b'f %%-%ds %%-%ds %%s' % (
3564 fmt = b'f %%-%ds %%-%ds %%s' % (
3563 max([len(abs) for abs in items]),
3565 max([len(abs) for abs in items]),
3564 max([len(repo.pathto(abs)) for abs in items]),
3566 max([len(repo.pathto(abs)) for abs in items]),
3565 )
3567 )
3566 for abs in items:
3568 for abs in items:
3567 line = fmt % (
3569 line = fmt % (
3568 abs,
3570 abs,
3569 f(repo.pathto(abs)),
3571 f(repo.pathto(abs)),
3570 m.exact(abs) and b'exact' or b'',
3572 m.exact(abs) and b'exact' or b'',
3571 )
3573 )
3572 ui.write(b"%s\n" % line.rstrip())
3574 ui.write(b"%s\n" % line.rstrip())
3573
3575
3574
3576
3575 @command(b'debugwhyunstable', [], _(b'REV'))
3577 @command(b'debugwhyunstable', [], _(b'REV'))
3576 def debugwhyunstable(ui, repo, rev):
3578 def debugwhyunstable(ui, repo, rev):
3577 """explain instabilities of a changeset"""
3579 """explain instabilities of a changeset"""
3578 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3580 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3579 dnodes = b''
3581 dnodes = b''
3580 if entry.get(b'divergentnodes'):
3582 if entry.get(b'divergentnodes'):
3581 dnodes = (
3583 dnodes = (
3582 b' '.join(
3584 b' '.join(
3583 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3585 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3584 for ctx in entry[b'divergentnodes']
3586 for ctx in entry[b'divergentnodes']
3585 )
3587 )
3586 + b' '
3588 + b' '
3587 )
3589 )
3588 ui.write(
3590 ui.write(
3589 b'%s: %s%s %s\n'
3591 b'%s: %s%s %s\n'
3590 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3592 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3591 )
3593 )
3592
3594
3593
3595
3594 @command(
3596 @command(
3595 b'debugwireargs',
3597 b'debugwireargs',
3596 [
3598 [
3597 (b'', b'three', b'', b'three'),
3599 (b'', b'three', b'', b'three'),
3598 (b'', b'four', b'', b'four'),
3600 (b'', b'four', b'', b'four'),
3599 (b'', b'five', b'', b'five'),
3601 (b'', b'five', b'', b'five'),
3600 ]
3602 ]
3601 + cmdutil.remoteopts,
3603 + cmdutil.remoteopts,
3602 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3604 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3603 norepo=True,
3605 norepo=True,
3604 )
3606 )
3605 def debugwireargs(ui, repopath, *vals, **opts):
3607 def debugwireargs(ui, repopath, *vals, **opts):
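"""issue the debugwireargs wire protocol command and echo the result

The command is sent twice so that any damage the first call does to the
connection stream shows up in the second; if the two results differ, the
second one is printed as a warning.
"""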
3606 opts = pycompat.byteskwargs(opts)
3608 opts = pycompat.byteskwargs(opts)
3607 repo = hg.peer(ui, opts, repopath)
3609 repo = hg.peer(ui, opts, repopath)
3608 for opt in cmdutil.remoteopts:
3610 for opt in cmdutil.remoteopts:
3609 del opts[opt[1]]
3611 del opts[opt[1]]
3610 args = {}
3612 args = {}
3611 for k, v in pycompat.iteritems(opts):
3613 for k, v in pycompat.iteritems(opts):
3612 if v:
3614 if v:
3613 args[k] = v
3615 args[k] = v
3614 args = pycompat.strkwargs(args)
3616 args = pycompat.strkwargs(args)
3615 # run twice to check that we don't mess up the stream for the next command
3617 # run twice to check that we don't mess up the stream for the next command
3616 res1 = repo.debugwireargs(*vals, **args)
3618 res1 = repo.debugwireargs(*vals, **args)
3617 res2 = repo.debugwireargs(*vals, **args)
3619 res2 = repo.debugwireargs(*vals, **args)
3618 ui.write(b"%s\n" % res1)
3620 ui.write(b"%s\n" % res1)
3619 if res1 != res2:
3621 if res1 != res2:
3620 ui.warn(b"%s\n" % res2)
3622 ui.warn(b"%s\n" % res2)
3621
3623
3622
3624
3623 def _parsewirelangblocks(fh):
3625 def _parsewirelangblocks(fh):
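"""Parse the debugwireproto block mini language from a file object.

Yields (action, blocklines) tuples: the unindented action line followed by
its argument lines, with comment lines skipped and continuation lines
(indented deeper than the previous line) concatenated onto the preceding
one.
"""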
3624 activeaction = None
3626 activeaction = None
3625 blocklines = []
3627 blocklines = []
3626 lastindent = 0
3628 lastindent = 0
3627
3629
3628 for line in fh:
3630 for line in fh:
3629 line = line.rstrip()
3631 line = line.rstrip()
3630 if not line:
3632 if not line:
3631 continue
3633 continue
3632
3634
3633 if line.startswith(b'#'):
3635 if line.startswith(b'#'):
3634 continue
3636 continue
3635
3637
3636 if not line.startswith(b' '):
3638 if not line.startswith(b' '):
3637 # New block. Flush previous one.
3639 # New block. Flush previous one.
3638 if activeaction:
3640 if activeaction:
3639 yield activeaction, blocklines
3641 yield activeaction, blocklines
3640
3642
3641 activeaction = line
3643 activeaction = line
3642 blocklines = []
3644 blocklines = []
3643 lastindent = 0
3645 lastindent = 0
3644 continue
3646 continue
3645
3647
3646 # Else we start with an indent.
3648 # Else we start with an indent.
3647
3649
3648 if not activeaction:
3650 if not activeaction:
3649 raise error.Abort(_(b'indented line outside of block'))
3651 raise error.Abort(_(b'indented line outside of block'))
3650
3652
3651 indent = len(line) - len(line.lstrip())
3653 indent = len(line) - len(line.lstrip())
3652
3654
3653 # If this line is indented more than the last line, concatenate it.
3655 # If this line is indented more than the last line, concatenate it.
3654 if indent > lastindent and blocklines:
3656 if indent > lastindent and blocklines:
3655 blocklines[-1] += line.lstrip()
3657 blocklines[-1] += line.lstrip()
3656 else:
3658 else:
3657 blocklines.append(line)
3659 blocklines.append(line)
3658 lastindent = indent
3660 lastindent = indent
3659
3661
3660 # Flush last block.
3662 # Flush last block.
3661 if activeaction:
3663 if activeaction:
3662 yield activeaction, blocklines
3664 yield activeaction, blocklines
3663
3665
3664
3666
3665 @command(
3667 @command(
3666 b'debugwireproto',
3668 b'debugwireproto',
3667 [
3669 [
3668 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3670 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3669 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3671 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3670 (
3672 (
3671 b'',
3673 b'',
3672 b'noreadstderr',
3674 b'noreadstderr',
3673 False,
3675 False,
3674 _(b'do not read from stderr of the remote'),
3676 _(b'do not read from stderr of the remote'),
3675 ),
3677 ),
3676 (
3678 (
3677 b'',
3679 b'',
3678 b'nologhandshake',
3680 b'nologhandshake',
3679 False,
3681 False,
3680 _(b'do not log I/O related to the peer handshake'),
3682 _(b'do not log I/O related to the peer handshake'),
3681 ),
3683 ),
3682 ]
3684 ]
3683 + cmdutil.remoteopts,
3685 + cmdutil.remoteopts,
3684 _(b'[PATH]'),
3686 _(b'[PATH]'),
3685 optionalrepo=True,
3687 optionalrepo=True,
3686 )
3688 )
3687 def debugwireproto(ui, repo, path=None, **opts):
3689 def debugwireproto(ui, repo, path=None, **opts):
3688 """send wire protocol commands to a server
3690 """send wire protocol commands to a server
3689
3691
3690 This command can be used to issue wire protocol commands to remote
3692 This command can be used to issue wire protocol commands to remote
3691 peers and to debug the raw data being exchanged.
3693 peers and to debug the raw data being exchanged.
3692
3694
3693 ``--localssh`` will start an SSH server against the current repository
3695 ``--localssh`` will start an SSH server against the current repository
3694 and connect to that. By default, the connection will perform a handshake
3696 and connect to that. By default, the connection will perform a handshake
3695 and establish an appropriate peer instance.
3697 and establish an appropriate peer instance.
3696
3698
3697 ``--peer`` can be used to bypass the handshake protocol and construct a
3699 ``--peer`` can be used to bypass the handshake protocol and construct a
3698 peer instance using the specified class type. Valid values are ``raw``,
3700 peer instance using the specified class type. Valid values are ``raw``,
3699 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3701 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3700 raw data payloads and don't support higher-level command actions.
3702 raw data payloads and don't support higher-level command actions.
3701
3703
3702 ``--noreadstderr`` can be used to disable automatic reading from stderr
3704 ``--noreadstderr`` can be used to disable automatic reading from stderr
3703 of the peer (for SSH connections only). Disabling automatic reading of
3705 of the peer (for SSH connections only). Disabling automatic reading of
3704 stderr is useful for making output more deterministic.
3706 stderr is useful for making output more deterministic.
3705
3707
3706 Commands are issued via a mini language which is specified via stdin.
3708 Commands are issued via a mini language which is specified via stdin.
3707 The language consists of individual actions to perform. An action is
3709 The language consists of individual actions to perform. An action is
3708 defined by a block. A block is defined as a line with no leading
3710 defined by a block. A block is defined as a line with no leading
3709 space followed by 0 or more lines with leading space. Blocks are
3711 space followed by 0 or more lines with leading space. Blocks are
3710 effectively a high-level command with additional metadata.
3712 effectively a high-level command with additional metadata.
3711
3713
3712 Lines beginning with ``#`` are ignored.
3714 Lines beginning with ``#`` are ignored.
3713
3715
3714 The following sections denote available actions.
3716 The following sections denote available actions.
3715
3717
3716 raw
3718 raw
3717 ---
3719 ---
3718
3720
3719 Send raw data to the server.
3721 Send raw data to the server.
3720
3722
3721 The block payload contains the raw data to send as one atomic send
3723 The block payload contains the raw data to send as one atomic send
3722 operation. The data may not actually be delivered in a single system
3724 operation. The data may not actually be delivered in a single system
3723 call: it depends on the abilities of the transport being used.
3725 call: it depends on the abilities of the transport being used.
3724
3726
3725 Each line in the block is de-indented and concatenated. Then, that
3727 Each line in the block is de-indented and concatenated. Then, that
3726 value is evaluated as a Python b'' literal. This allows the use of
3728 value is evaluated as a Python b'' literal. This allows the use of
3727 backslash escaping, etc.
3729 backslash escaping, etc.
3728
3730
3729 raw+
3731 raw+
3730 ----
3732 ----
3731
3733
3732 Behaves like ``raw`` except flushes output afterwards.
3734 Behaves like ``raw`` except flushes output afterwards.
3733
3735
3734 command <X>
3736 command <X>
3735 -----------
3737 -----------
3736
3738
3737 Send a request to run a named command, whose name follows the ``command``
3739 Send a request to run a named command, whose name follows the ``command``
3738 string.
3740 string.
3739
3741
3740 Arguments to the command are defined as lines in this block. The format of
3742 Arguments to the command are defined as lines in this block. The format of
3741 each line is ``<key> <value>``. e.g.::
3743 each line is ``<key> <value>``. e.g.::
3742
3744
3743 command listkeys
3745 command listkeys
3744 namespace bookmarks
3746 namespace bookmarks
3745
3747
3746 If the value begins with ``eval:``, it will be interpreted as a Python
3748 If the value begins with ``eval:``, it will be interpreted as a Python
3747 literal expression. Otherwise values are interpreted as Python b'' literals.
3749 literal expression. Otherwise values are interpreted as Python b'' literals.
3748 This allows sending complex types and encoding special byte sequences via
3750 This allows sending complex types and encoding special byte sequences via
3749 backslash escaping.
3751 backslash escaping.
3750
3752
3751 The following arguments have special meaning:
3753 The following arguments have special meaning:
3752
3754
3753 ``PUSHFILE``
3755 ``PUSHFILE``
3754 When defined, the *push* mechanism of the peer will be used instead
3756 When defined, the *push* mechanism of the peer will be used instead
3755 of the static request-response mechanism and the content of the
3757 of the static request-response mechanism and the content of the
3756 file specified in the value of this argument will be sent as the
3758 file specified in the value of this argument will be sent as the
3757 command payload.
3759 command payload.
3758
3760
3759 This can be used to submit a local bundle file to the remote.
3761 This can be used to submit a local bundle file to the remote.
3760
3762
3761 batchbegin
3763 batchbegin
3762 ----------
3764 ----------
3763
3765
3764 Instruct the peer to begin a batched send.
3766 Instruct the peer to begin a batched send.
3765
3767
3766 All ``command`` blocks are queued for execution until the next
3768 All ``command`` blocks are queued for execution until the next
3767 ``batchsubmit`` block.
3769 ``batchsubmit`` block.
3768
3770
3769 batchsubmit
3771 batchsubmit
3770 -----------
3772 -----------
3771
3773
3772 Submit previously queued ``command`` blocks as a batch request.
3774 Submit previously queued ``command`` blocks as a batch request.
3773
3775
3774 This action MUST be paired with a ``batchbegin`` action.
3776 This action MUST be paired with a ``batchbegin`` action.
3775
3777
3776 httprequest <method> <path>
3778 httprequest <method> <path>
3777 ---------------------------
3779 ---------------------------
3778
3780
3779 (HTTP peer only)
3781 (HTTP peer only)
3780
3782
3781 Send an HTTP request to the peer.
3783 Send an HTTP request to the peer.
3782
3784
3783 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3785 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3784
3786
3785 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3787 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3786 headers to add to the request. e.g. ``Accept: foo``.
3788 headers to add to the request. e.g. ``Accept: foo``.
3787
3789
3788 The following arguments are special:
3790 The following arguments are special:
3789
3791
3790 ``BODYFILE``
3792 ``BODYFILE``
3791 The content of the file defined as the value to this argument will be
3793 The content of the file defined as the value to this argument will be
3792 transferred verbatim as the HTTP request body.
3794 transferred verbatim as the HTTP request body.
3793
3795
3794 ``frame <type> <flags> <payload>``
3796 ``frame <type> <flags> <payload>``
3795 Send a unified protocol frame as part of the request body.
3797 Send a unified protocol frame as part of the request body.
3796
3798
3797 All frames will be collected and sent as the body to the HTTP
3799 All frames will be collected and sent as the body to the HTTP
3798 request.
3800 request.
3799
3801
3800 close
3802 close
3801 -----
3803 -----
3802
3804
3803 Close the connection to the server.
3805 Close the connection to the server.
3804
3806
3805 flush
3807 flush
3806 -----
3808 -----
3807
3809
3808 Flush data written to the server.
3810 Flush data written to the server.
3809
3811
3810 readavailable
3812 readavailable
3811 -------------
3813 -------------
3812
3814
3813 Close the write end of the connection and read all available data from
3815 Close the write end of the connection and read all available data from
3814 the server.
3816 the server.
3815
3817
3816 If the connection to the server encompasses multiple pipes, we poll both
3818 If the connection to the server encompasses multiple pipes, we poll both
3817 pipes and read available data.
3819 pipes and read available data.
3818
3820
3819 readline
3821 readline
3820 --------
3822 --------
3821
3823
3822 Read a line of output from the server. If there are multiple output
3824 Read a line of output from the server. If there are multiple output
3823 pipes, reads only the main pipe.
3825 pipes, reads only the main pipe.
3824
3826
3825 ereadline
3827 ereadline
3826 ---------
3828 ---------
3827
3829
3828 Like ``readline``, but read from the stderr pipe, if available.
3830 Like ``readline``, but read from the stderr pipe, if available.
3829
3831
3830 read <X>
3832 read <X>
3831 --------
3833 --------
3832
3834
3833 ``read()`` N bytes from the server's main output pipe.
3835 ``read()`` N bytes from the server's main output pipe.
3834
3836
3835 eread <X>
3837 eread <X>
3836 ---------
3838 ---------
3837
3839
3838 ``read()`` N bytes from the server's stderr pipe, if available.
3840 ``read()`` N bytes from the server's stderr pipe, if available.
3839
3841
3840 Specifying Unified Frame-Based Protocol Frames
3842 Specifying Unified Frame-Based Protocol Frames
3841 ----------------------------------------------
3843 ----------------------------------------------
3842
3844
3843 It is possible to emit *Unified Frame-Based Protocol* frames by using
3845 It is possible to emit *Unified Frame-Based Protocol* frames by using
3844 special syntax.
3846 special syntax.
3845
3847
3848 A frame is composed of a type, flags, and a payload. These can be parsed
3850 A frame is composed of a type, flags, and a payload. These can be parsed
3847 from a string of the form:
3849 from a string of the form:
3848
3850
3849 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3851 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3850
3852
3851 ``request-id`` and ``stream-id`` are integers defining the request and
3853 ``request-id`` and ``stream-id`` are integers defining the request and
3852 stream identifiers.
3854 stream identifiers.
3853
3855
3854 ``type`` can be an integer value for the frame type or the string name
3856 ``type`` can be an integer value for the frame type or the string name
3855 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3857 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3856 ``command-name``.
3858 ``command-name``.
3857
3859
3858 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3860 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3859 components. Each component (and there can be just one) can be an integer
3861 components. Each component (and there can be just one) can be an integer
3860 or a flag name for stream flags or frame flags, respectively. Values are
3862 or a flag name for stream flags or frame flags, respectively. Values are
3861 resolved to integers and then bitwise OR'd together.
3863 resolved to integers and then bitwise OR'd together.
3862
3864
3863 ``payload`` represents the raw frame payload. If it begins with
3865 ``payload`` represents the raw frame payload. If it begins with
3864 ``cbor:``, the following string is evaluated as Python code and the
3866 ``cbor:``, the following string is evaluated as Python code and the
3865 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3867 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3866 as a Python byte string literal.
3868 as a Python byte string literal.
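
As a hypothetical end-to-end example, the following script fed on stdin
issues one command against an SSH peer, reads whatever the server sends
back, and closes the connection::

  command listkeys
      namespace bookmarks
  readavailable
  close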
3867 """
3869 """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

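    # The wire protocol script is read from stdin and parsed into a list of
    # (action, lines) blocks; the loop further below dispatches on each
    # action in turn.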
    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            r'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    r'loggingfh': ui,
                    r'loggingname': b's',
                    r'loggingopts': {r'logdata': True, r'logdataapis': False,},
                }
            )

        if ui.debugflag:
            openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

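                # Values prefixed with "eval:" are interpreted as Python
                # literals, which allows non-string argument types (ints,
                # lists, dicts, ...); everything else is treated as an
                # escaped byte string.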
                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

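            # A special PUSHFILE argument names a local file whose contents
            # are streamed to the server via the peer's push interface
            # instead of being passed as a regular command argument.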
            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], r'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
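            # Each remaining line in the block is either an HTTP header of
            # the form "Name: value", a "BODYFILE <path>" directive naming a
            # file to use as the request body, or a "frame <spec>" line
            # describing a protocol frame to place in the body.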
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get(r'Content-Type')
            if ct == r'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
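

# A rough usage sketch (an assumed invocation, shown only for illustration;
# the option spellings mirror the opts keys used above, e.g. --localssh,
# --peer, --noreadstderr, and the command/argument layout mirrors how the
# action loop above consumes blocks from stdin):
#
#   $ hg debugwireproto --localssh --peer ssh1 <<'EOF'
#   command listkeys
#       namespace bookmarks
#   EOF
#
# Each block of the stdin script becomes one (action, lines) pair handled by
# the dispatch loop above.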