debugcommands: add Python implementation to debuginstall...
Gregory Szorc
r44603:0b475b0b default
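The hunk below shows the file with full context; the only modified line visible in this excerpt is the new `import platform` at line 16 of the updated file. The code in `debuginstall` that actually uses the import lies beyond the excerpt, so what follows is only a hedged sketch of how such a check is typically emitted through the file's byte-oriented formatter API. The helper name `_writepythonimplementation` and its placement are assumptions for illustration, not part of the changeset.

# Hypothetical sketch only: the real hunk that uses the new "platform"
# import is not shown in this excerpt.
import platform


def _writepythonimplementation(ui, fm):
    # platform.python_implementation() returns a native string such as
    # "CPython" or "PyPy"; pycompat.sysbytes() converts it to bytes so it
    # can be written through Mercurial's byte-oriented ui/formatter layer.
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )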
@@ -1,4288 +1,4294 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import platform
16 import random
17 import random
17 import re
18 import re
18 import socket
19 import socket
19 import ssl
20 import ssl
20 import stat
21 import stat
21 import string
22 import string
22 import subprocess
23 import subprocess
23 import sys
24 import sys
24 import time
25 import time
25
26
26 from .i18n import _
27 from .i18n import _
27 from .node import (
28 from .node import (
28 bin,
29 bin,
29 hex,
30 hex,
30 nullhex,
31 nullhex,
31 nullid,
32 nullid,
32 nullrev,
33 nullrev,
33 short,
34 short,
34 )
35 )
35 from .pycompat import (
36 from .pycompat import (
36 getattr,
37 getattr,
37 open,
38 open,
38 )
39 )
39 from . import (
40 from . import (
40 bundle2,
41 bundle2,
41 changegroup,
42 changegroup,
42 cmdutil,
43 cmdutil,
43 color,
44 color,
44 context,
45 context,
45 copies,
46 copies,
46 dagparser,
47 dagparser,
47 encoding,
48 encoding,
48 error,
49 error,
49 exchange,
50 exchange,
50 extensions,
51 extensions,
51 filemerge,
52 filemerge,
52 filesetlang,
53 filesetlang,
53 formatter,
54 formatter,
54 hg,
55 hg,
55 httppeer,
56 httppeer,
56 localrepo,
57 localrepo,
57 lock as lockmod,
58 lock as lockmod,
58 logcmdutil,
59 logcmdutil,
59 merge as mergemod,
60 merge as mergemod,
60 obsolete,
61 obsolete,
61 obsutil,
62 obsutil,
62 pathutil,
63 pathutil,
63 phases,
64 phases,
64 policy,
65 policy,
65 pvec,
66 pvec,
66 pycompat,
67 pycompat,
67 registrar,
68 registrar,
68 repair,
69 repair,
69 revlog,
70 revlog,
70 revset,
71 revset,
71 revsetlang,
72 revsetlang,
72 scmutil,
73 scmutil,
73 setdiscovery,
74 setdiscovery,
74 simplemerge,
75 simplemerge,
75 sshpeer,
76 sshpeer,
76 sslutil,
77 sslutil,
77 streamclone,
78 streamclone,
78 templater,
79 templater,
79 treediscovery,
80 treediscovery,
80 upgrade,
81 upgrade,
81 url as urlmod,
82 url as urlmod,
82 util,
83 util,
83 vfs as vfsmod,
84 vfs as vfsmod,
84 wireprotoframing,
85 wireprotoframing,
85 wireprotoserver,
86 wireprotoserver,
86 wireprotov2peer,
87 wireprotov2peer,
87 )
88 )
88 from .utils import (
89 from .utils import (
89 cborutil,
90 cborutil,
90 compression,
91 compression,
91 dateutil,
92 dateutil,
92 procutil,
93 procutil,
93 stringutil,
94 stringutil,
94 )
95 )
95
96
96 from .revlogutils import deltas as deltautil
97 from .revlogutils import deltas as deltautil
97
98
98 release = lockmod.release
99 release = lockmod.release
99
100
100 command = registrar.command()
101 command = registrar.command()
101
102
102
103
103 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
104 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
104 def debugancestor(ui, repo, *args):
105 def debugancestor(ui, repo, *args):
105 """find the ancestor revision of two revisions in a given index"""
106 """find the ancestor revision of two revisions in a given index"""
106 if len(args) == 3:
107 if len(args) == 3:
107 index, rev1, rev2 = args
108 index, rev1, rev2 = args
108 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
109 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
109 lookup = r.lookup
110 lookup = r.lookup
110 elif len(args) == 2:
111 elif len(args) == 2:
111 if not repo:
112 if not repo:
112 raise error.Abort(
113 raise error.Abort(
113 _(b'there is no Mercurial repository here (.hg not found)')
114 _(b'there is no Mercurial repository here (.hg not found)')
114 )
115 )
115 rev1, rev2 = args
116 rev1, rev2 = args
116 r = repo.changelog
117 r = repo.changelog
117 lookup = repo.lookup
118 lookup = repo.lookup
118 else:
119 else:
119 raise error.Abort(_(b'either two or three arguments required'))
120 raise error.Abort(_(b'either two or three arguments required'))
120 a = r.ancestor(lookup(rev1), lookup(rev2))
121 a = r.ancestor(lookup(rev1), lookup(rev2))
121 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
122 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
122
123
123
124
124 @command(b'debugapplystreamclonebundle', [], b'FILE')
125 @command(b'debugapplystreamclonebundle', [], b'FILE')
125 def debugapplystreamclonebundle(ui, repo, fname):
126 def debugapplystreamclonebundle(ui, repo, fname):
126 """apply a stream clone bundle file"""
127 """apply a stream clone bundle file"""
127 f = hg.openpath(ui, fname)
128 f = hg.openpath(ui, fname)
128 gen = exchange.readbundle(ui, f, fname)
129 gen = exchange.readbundle(ui, f, fname)
129 gen.apply(repo)
130 gen.apply(repo)
130
131
131
132
132 @command(
133 @command(
133 b'debugbuilddag',
134 b'debugbuilddag',
134 [
135 [
135 (
136 (
136 b'm',
137 b'm',
137 b'mergeable-file',
138 b'mergeable-file',
138 None,
139 None,
139 _(b'add single file mergeable changes'),
140 _(b'add single file mergeable changes'),
140 ),
141 ),
141 (
142 (
142 b'o',
143 b'o',
143 b'overwritten-file',
144 b'overwritten-file',
144 None,
145 None,
145 _(b'add single file all revs overwrite'),
146 _(b'add single file all revs overwrite'),
146 ),
147 ),
147 (b'n', b'new-file', None, _(b'add new file at each rev')),
148 (b'n', b'new-file', None, _(b'add new file at each rev')),
148 ],
149 ],
149 _(b'[OPTION]... [TEXT]'),
150 _(b'[OPTION]... [TEXT]'),
150 )
151 )
151 def debugbuilddag(
152 def debugbuilddag(
152 ui,
153 ui,
153 repo,
154 repo,
154 text=None,
155 text=None,
155 mergeable_file=False,
156 mergeable_file=False,
156 overwritten_file=False,
157 overwritten_file=False,
157 new_file=False,
158 new_file=False,
158 ):
159 ):
159 """builds a repo with a given DAG from scratch in the current empty repo
160 """builds a repo with a given DAG from scratch in the current empty repo
160
161
161 The description of the DAG is read from stdin if not given on the
162 The description of the DAG is read from stdin if not given on the
162 command line.
163 command line.
163
164
164 Elements:
165 Elements:
165
166
166 - "+n" is a linear run of n nodes based on the current default parent
167 - "+n" is a linear run of n nodes based on the current default parent
167 - "." is a single node based on the current default parent
168 - "." is a single node based on the current default parent
168 - "$" resets the default parent to null (implied at the start);
169 - "$" resets the default parent to null (implied at the start);
169 otherwise the default parent is always the last node created
170 otherwise the default parent is always the last node created
170 - "<p" sets the default parent to the backref p
171 - "<p" sets the default parent to the backref p
171 - "*p" is a fork at parent p, which is a backref
172 - "*p" is a fork at parent p, which is a backref
172 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
173 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
173 - "/p2" is a merge of the preceding node and p2
174 - "/p2" is a merge of the preceding node and p2
174 - ":tag" defines a local tag for the preceding node
175 - ":tag" defines a local tag for the preceding node
175 - "@branch" sets the named branch for subsequent nodes
176 - "@branch" sets the named branch for subsequent nodes
176 - "#...\\n" is a comment up to the end of the line
177 - "#...\\n" is a comment up to the end of the line
177
178
178 Whitespace between the above elements is ignored.
179 Whitespace between the above elements is ignored.
179
180
180 A backref is either
181 A backref is either
181
182
182 - a number n, which references the node curr-n, where curr is the current
183 - a number n, which references the node curr-n, where curr is the current
183 node, or
184 node, or
184 - the name of a local tag you placed earlier using ":tag", or
185 - the name of a local tag you placed earlier using ":tag", or
185 - empty to denote the default parent.
186 - empty to denote the default parent.
186
187
187 All string valued-elements are either strictly alphanumeric, or must
188 All string valued-elements are either strictly alphanumeric, or must
188 be enclosed in double quotes ("..."), with "\\" as escape character.
189 be enclosed in double quotes ("..."), with "\\" as escape character.
189 """
190 """
190
191
191 if text is None:
192 if text is None:
192 ui.status(_(b"reading DAG from stdin\n"))
193 ui.status(_(b"reading DAG from stdin\n"))
193 text = ui.fin.read()
194 text = ui.fin.read()
194
195
195 cl = repo.changelog
196 cl = repo.changelog
196 if len(cl) > 0:
197 if len(cl) > 0:
197 raise error.Abort(_(b'repository is not empty'))
198 raise error.Abort(_(b'repository is not empty'))
198
199
199 # determine number of revs in DAG
200 # determine number of revs in DAG
200 total = 0
201 total = 0
201 for type, data in dagparser.parsedag(text):
202 for type, data in dagparser.parsedag(text):
202 if type == b'n':
203 if type == b'n':
203 total += 1
204 total += 1
204
205
205 if mergeable_file:
206 if mergeable_file:
206 linesperrev = 2
207 linesperrev = 2
207 # make a file with k lines per rev
208 # make a file with k lines per rev
208 initialmergedlines = [
209 initialmergedlines = [
209 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
210 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
210 ]
211 ]
211 initialmergedlines.append(b"")
212 initialmergedlines.append(b"")
212
213
213 tags = []
214 tags = []
214 progress = ui.makeprogress(
215 progress = ui.makeprogress(
215 _(b'building'), unit=_(b'revisions'), total=total
216 _(b'building'), unit=_(b'revisions'), total=total
216 )
217 )
217 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
218 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
218 at = -1
219 at = -1
219 atbranch = b'default'
220 atbranch = b'default'
220 nodeids = []
221 nodeids = []
221 id = 0
222 id = 0
222 progress.update(id)
223 progress.update(id)
223 for type, data in dagparser.parsedag(text):
224 for type, data in dagparser.parsedag(text):
224 if type == b'n':
225 if type == b'n':
225 ui.note((b'node %s\n' % pycompat.bytestr(data)))
226 ui.note((b'node %s\n' % pycompat.bytestr(data)))
226 id, ps = data
227 id, ps = data
227
228
228 files = []
229 files = []
229 filecontent = {}
230 filecontent = {}
230
231
231 p2 = None
232 p2 = None
232 if mergeable_file:
233 if mergeable_file:
233 fn = b"mf"
234 fn = b"mf"
234 p1 = repo[ps[0]]
235 p1 = repo[ps[0]]
235 if len(ps) > 1:
236 if len(ps) > 1:
236 p2 = repo[ps[1]]
237 p2 = repo[ps[1]]
237 pa = p1.ancestor(p2)
238 pa = p1.ancestor(p2)
238 base, local, other = [
239 base, local, other = [
239 x[fn].data() for x in (pa, p1, p2)
240 x[fn].data() for x in (pa, p1, p2)
240 ]
241 ]
241 m3 = simplemerge.Merge3Text(base, local, other)
242 m3 = simplemerge.Merge3Text(base, local, other)
242 ml = [l.strip() for l in m3.merge_lines()]
243 ml = [l.strip() for l in m3.merge_lines()]
243 ml.append(b"")
244 ml.append(b"")
244 elif at > 0:
245 elif at > 0:
245 ml = p1[fn].data().split(b"\n")
246 ml = p1[fn].data().split(b"\n")
246 else:
247 else:
247 ml = initialmergedlines
248 ml = initialmergedlines
248 ml[id * linesperrev] += b" r%i" % id
249 ml[id * linesperrev] += b" r%i" % id
249 mergedtext = b"\n".join(ml)
250 mergedtext = b"\n".join(ml)
250 files.append(fn)
251 files.append(fn)
251 filecontent[fn] = mergedtext
252 filecontent[fn] = mergedtext
252
253
253 if overwritten_file:
254 if overwritten_file:
254 fn = b"of"
255 fn = b"of"
255 files.append(fn)
256 files.append(fn)
256 filecontent[fn] = b"r%i\n" % id
257 filecontent[fn] = b"r%i\n" % id
257
258
258 if new_file:
259 if new_file:
259 fn = b"nf%i" % id
260 fn = b"nf%i" % id
260 files.append(fn)
261 files.append(fn)
261 filecontent[fn] = b"r%i\n" % id
262 filecontent[fn] = b"r%i\n" % id
262 if len(ps) > 1:
263 if len(ps) > 1:
263 if not p2:
264 if not p2:
264 p2 = repo[ps[1]]
265 p2 = repo[ps[1]]
265 for fn in p2:
266 for fn in p2:
266 if fn.startswith(b"nf"):
267 if fn.startswith(b"nf"):
267 files.append(fn)
268 files.append(fn)
268 filecontent[fn] = p2[fn].data()
269 filecontent[fn] = p2[fn].data()
269
270
270 def fctxfn(repo, cx, path):
271 def fctxfn(repo, cx, path):
271 if path in filecontent:
272 if path in filecontent:
272 return context.memfilectx(
273 return context.memfilectx(
273 repo, cx, path, filecontent[path]
274 repo, cx, path, filecontent[path]
274 )
275 )
275 return None
276 return None
276
277
277 if len(ps) == 0 or ps[0] < 0:
278 if len(ps) == 0 or ps[0] < 0:
278 pars = [None, None]
279 pars = [None, None]
279 elif len(ps) == 1:
280 elif len(ps) == 1:
280 pars = [nodeids[ps[0]], None]
281 pars = [nodeids[ps[0]], None]
281 else:
282 else:
282 pars = [nodeids[p] for p in ps]
283 pars = [nodeids[p] for p in ps]
283 cx = context.memctx(
284 cx = context.memctx(
284 repo,
285 repo,
285 pars,
286 pars,
286 b"r%i" % id,
287 b"r%i" % id,
287 files,
288 files,
288 fctxfn,
289 fctxfn,
289 date=(id, 0),
290 date=(id, 0),
290 user=b"debugbuilddag",
291 user=b"debugbuilddag",
291 extra={b'branch': atbranch},
292 extra={b'branch': atbranch},
292 )
293 )
293 nodeid = repo.commitctx(cx)
294 nodeid = repo.commitctx(cx)
294 nodeids.append(nodeid)
295 nodeids.append(nodeid)
295 at = id
296 at = id
296 elif type == b'l':
297 elif type == b'l':
297 id, name = data
298 id, name = data
298 ui.note((b'tag %s\n' % name))
299 ui.note((b'tag %s\n' % name))
299 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
300 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
300 elif type == b'a':
301 elif type == b'a':
301 ui.note((b'branch %s\n' % data))
302 ui.note((b'branch %s\n' % data))
302 atbranch = data
303 atbranch = data
303 progress.update(id)
304 progress.update(id)
304
305
305 if tags:
306 if tags:
306 repo.vfs.write(b"localtags", b"".join(tags))
307 repo.vfs.write(b"localtags", b"".join(tags))
307
308
308
309
309 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
310 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
310 indent_string = b' ' * indent
311 indent_string = b' ' * indent
311 if all:
312 if all:
312 ui.writenoi18n(
313 ui.writenoi18n(
313 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
314 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
314 % indent_string
315 % indent_string
315 )
316 )
316
317
317 def showchunks(named):
318 def showchunks(named):
318 ui.write(b"\n%s%s\n" % (indent_string, named))
319 ui.write(b"\n%s%s\n" % (indent_string, named))
319 for deltadata in gen.deltaiter():
320 for deltadata in gen.deltaiter():
320 node, p1, p2, cs, deltabase, delta, flags = deltadata
321 node, p1, p2, cs, deltabase, delta, flags = deltadata
321 ui.write(
322 ui.write(
322 b"%s%s %s %s %s %s %d\n"
323 b"%s%s %s %s %s %s %d\n"
323 % (
324 % (
324 indent_string,
325 indent_string,
325 hex(node),
326 hex(node),
326 hex(p1),
327 hex(p1),
327 hex(p2),
328 hex(p2),
328 hex(cs),
329 hex(cs),
329 hex(deltabase),
330 hex(deltabase),
330 len(delta),
331 len(delta),
331 )
332 )
332 )
333 )
333
334
334 gen.changelogheader()
335 gen.changelogheader()
335 showchunks(b"changelog")
336 showchunks(b"changelog")
336 gen.manifestheader()
337 gen.manifestheader()
337 showchunks(b"manifest")
338 showchunks(b"manifest")
338 for chunkdata in iter(gen.filelogheader, {}):
339 for chunkdata in iter(gen.filelogheader, {}):
339 fname = chunkdata[b'filename']
340 fname = chunkdata[b'filename']
340 showchunks(fname)
341 showchunks(fname)
341 else:
342 else:
342 if isinstance(gen, bundle2.unbundle20):
343 if isinstance(gen, bundle2.unbundle20):
343 raise error.Abort(_(b'use debugbundle2 for this file'))
344 raise error.Abort(_(b'use debugbundle2 for this file'))
344 gen.changelogheader()
345 gen.changelogheader()
345 for deltadata in gen.deltaiter():
346 for deltadata in gen.deltaiter():
346 node, p1, p2, cs, deltabase, delta, flags = deltadata
347 node, p1, p2, cs, deltabase, delta, flags = deltadata
347 ui.write(b"%s%s\n" % (indent_string, hex(node)))
348 ui.write(b"%s%s\n" % (indent_string, hex(node)))
348
349
349
350
350 def _debugobsmarkers(ui, part, indent=0, **opts):
351 def _debugobsmarkers(ui, part, indent=0, **opts):
351 """display version and markers contained in 'data'"""
352 """display version and markers contained in 'data'"""
352 opts = pycompat.byteskwargs(opts)
353 opts = pycompat.byteskwargs(opts)
353 data = part.read()
354 data = part.read()
354 indent_string = b' ' * indent
355 indent_string = b' ' * indent
355 try:
356 try:
356 version, markers = obsolete._readmarkers(data)
357 version, markers = obsolete._readmarkers(data)
357 except error.UnknownVersion as exc:
358 except error.UnknownVersion as exc:
358 msg = b"%sunsupported version: %s (%d bytes)\n"
359 msg = b"%sunsupported version: %s (%d bytes)\n"
359 msg %= indent_string, exc.version, len(data)
360 msg %= indent_string, exc.version, len(data)
360 ui.write(msg)
361 ui.write(msg)
361 else:
362 else:
362 msg = b"%sversion: %d (%d bytes)\n"
363 msg = b"%sversion: %d (%d bytes)\n"
363 msg %= indent_string, version, len(data)
364 msg %= indent_string, version, len(data)
364 ui.write(msg)
365 ui.write(msg)
365 fm = ui.formatter(b'debugobsolete', opts)
366 fm = ui.formatter(b'debugobsolete', opts)
366 for rawmarker in sorted(markers):
367 for rawmarker in sorted(markers):
367 m = obsutil.marker(None, rawmarker)
368 m = obsutil.marker(None, rawmarker)
368 fm.startitem()
369 fm.startitem()
369 fm.plain(indent_string)
370 fm.plain(indent_string)
370 cmdutil.showmarker(fm, m)
371 cmdutil.showmarker(fm, m)
371 fm.end()
372 fm.end()
372
373
373
374
374 def _debugphaseheads(ui, data, indent=0):
375 def _debugphaseheads(ui, data, indent=0):
375 """display version and markers contained in 'data'"""
376 """display version and markers contained in 'data'"""
376 indent_string = b' ' * indent
377 indent_string = b' ' * indent
377 headsbyphase = phases.binarydecode(data)
378 headsbyphase = phases.binarydecode(data)
378 for phase in phases.allphases:
379 for phase in phases.allphases:
379 for head in headsbyphase[phase]:
380 for head in headsbyphase[phase]:
380 ui.write(indent_string)
381 ui.write(indent_string)
381 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
382
383
383
384
384 def _quasirepr(thing):
385 def _quasirepr(thing):
385 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
386 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
386 return b'{%s}' % (
387 return b'{%s}' % (
387 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
388 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
388 )
389 )
389 return pycompat.bytestr(repr(thing))
390 return pycompat.bytestr(repr(thing))
390
391
391
392
392 def _debugbundle2(ui, gen, all=None, **opts):
393 def _debugbundle2(ui, gen, all=None, **opts):
393 """lists the contents of a bundle2"""
394 """lists the contents of a bundle2"""
394 if not isinstance(gen, bundle2.unbundle20):
395 if not isinstance(gen, bundle2.unbundle20):
395 raise error.Abort(_(b'not a bundle2 file'))
396 raise error.Abort(_(b'not a bundle2 file'))
396 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
397 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
397 parttypes = opts.get('part_type', [])
398 parttypes = opts.get('part_type', [])
398 for part in gen.iterparts():
399 for part in gen.iterparts():
399 if parttypes and part.type not in parttypes:
400 if parttypes and part.type not in parttypes:
400 continue
401 continue
401 msg = b'%s -- %s (mandatory: %r)\n'
402 msg = b'%s -- %s (mandatory: %r)\n'
402 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
403 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
403 if part.type == b'changegroup':
404 if part.type == b'changegroup':
404 version = part.params.get(b'version', b'01')
405 version = part.params.get(b'version', b'01')
405 cg = changegroup.getunbundler(version, part, b'UN')
406 cg = changegroup.getunbundler(version, part, b'UN')
406 if not ui.quiet:
407 if not ui.quiet:
407 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
408 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
408 if part.type == b'obsmarkers':
409 if part.type == b'obsmarkers':
409 if not ui.quiet:
410 if not ui.quiet:
410 _debugobsmarkers(ui, part, indent=4, **opts)
411 _debugobsmarkers(ui, part, indent=4, **opts)
411 if part.type == b'phase-heads':
412 if part.type == b'phase-heads':
412 if not ui.quiet:
413 if not ui.quiet:
413 _debugphaseheads(ui, part, indent=4)
414 _debugphaseheads(ui, part, indent=4)
414
415
415
416
416 @command(
417 @command(
417 b'debugbundle',
418 b'debugbundle',
418 [
419 [
419 (b'a', b'all', None, _(b'show all details')),
420 (b'a', b'all', None, _(b'show all details')),
420 (b'', b'part-type', [], _(b'show only the named part type')),
421 (b'', b'part-type', [], _(b'show only the named part type')),
421 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
422 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
422 ],
423 ],
423 _(b'FILE'),
424 _(b'FILE'),
424 norepo=True,
425 norepo=True,
425 )
426 )
426 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
427 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
427 """lists the contents of a bundle"""
428 """lists the contents of a bundle"""
428 with hg.openpath(ui, bundlepath) as f:
429 with hg.openpath(ui, bundlepath) as f:
429 if spec:
430 if spec:
430 spec = exchange.getbundlespec(ui, f)
431 spec = exchange.getbundlespec(ui, f)
431 ui.write(b'%s\n' % spec)
432 ui.write(b'%s\n' % spec)
432 return
433 return
433
434
434 gen = exchange.readbundle(ui, f, bundlepath)
435 gen = exchange.readbundle(ui, f, bundlepath)
435 if isinstance(gen, bundle2.unbundle20):
436 if isinstance(gen, bundle2.unbundle20):
436 return _debugbundle2(ui, gen, all=all, **opts)
437 return _debugbundle2(ui, gen, all=all, **opts)
437 _debugchangegroup(ui, gen, all=all, **opts)
438 _debugchangegroup(ui, gen, all=all, **opts)
438
439
439
440
440 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
441 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
441 def debugcapabilities(ui, path, **opts):
442 def debugcapabilities(ui, path, **opts):
442 """lists the capabilities of a remote peer"""
443 """lists the capabilities of a remote peer"""
443 opts = pycompat.byteskwargs(opts)
444 opts = pycompat.byteskwargs(opts)
444 peer = hg.peer(ui, opts, path)
445 peer = hg.peer(ui, opts, path)
445 caps = peer.capabilities()
446 caps = peer.capabilities()
446 ui.writenoi18n(b'Main capabilities:\n')
447 ui.writenoi18n(b'Main capabilities:\n')
447 for c in sorted(caps):
448 for c in sorted(caps):
448 ui.write(b' %s\n' % c)
449 ui.write(b' %s\n' % c)
449 b2caps = bundle2.bundle2caps(peer)
450 b2caps = bundle2.bundle2caps(peer)
450 if b2caps:
451 if b2caps:
451 ui.writenoi18n(b'Bundle2 capabilities:\n')
452 ui.writenoi18n(b'Bundle2 capabilities:\n')
452 for key, values in sorted(pycompat.iteritems(b2caps)):
453 for key, values in sorted(pycompat.iteritems(b2caps)):
453 ui.write(b' %s\n' % key)
454 ui.write(b' %s\n' % key)
454 for v in values:
455 for v in values:
455 ui.write(b' %s\n' % v)
456 ui.write(b' %s\n' % v)
456
457
457
458
458 @command(b'debugcheckstate', [], b'')
459 @command(b'debugcheckstate', [], b'')
459 def debugcheckstate(ui, repo):
460 def debugcheckstate(ui, repo):
460 """validate the correctness of the current dirstate"""
461 """validate the correctness of the current dirstate"""
461 parent1, parent2 = repo.dirstate.parents()
462 parent1, parent2 = repo.dirstate.parents()
462 m1 = repo[parent1].manifest()
463 m1 = repo[parent1].manifest()
463 m2 = repo[parent2].manifest()
464 m2 = repo[parent2].manifest()
464 errors = 0
465 errors = 0
465 for f in repo.dirstate:
466 for f in repo.dirstate:
466 state = repo.dirstate[f]
467 state = repo.dirstate[f]
467 if state in b"nr" and f not in m1:
468 if state in b"nr" and f not in m1:
468 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
469 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
469 errors += 1
470 errors += 1
470 if state in b"a" and f in m1:
471 if state in b"a" and f in m1:
471 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
472 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
472 errors += 1
473 errors += 1
473 if state in b"m" and f not in m1 and f not in m2:
474 if state in b"m" and f not in m1 and f not in m2:
474 ui.warn(
475 ui.warn(
475 _(b"%s in state %s, but not in either manifest\n") % (f, state)
476 _(b"%s in state %s, but not in either manifest\n") % (f, state)
476 )
477 )
477 errors += 1
478 errors += 1
478 for f in m1:
479 for f in m1:
479 state = repo.dirstate[f]
480 state = repo.dirstate[f]
480 if state not in b"nrm":
481 if state not in b"nrm":
481 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
482 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
482 errors += 1
483 errors += 1
483 if errors:
484 if errors:
484 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
485 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
485 raise error.Abort(errstr)
486 raise error.Abort(errstr)
486
487
487
488
488 @command(
489 @command(
489 b'debugcolor',
490 b'debugcolor',
490 [(b'', b'style', None, _(b'show all configured styles'))],
491 [(b'', b'style', None, _(b'show all configured styles'))],
491 b'hg debugcolor',
492 b'hg debugcolor',
492 )
493 )
493 def debugcolor(ui, repo, **opts):
494 def debugcolor(ui, repo, **opts):
494 """show available color, effects or style"""
495 """show available color, effects or style"""
495 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
496 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
496 if opts.get('style'):
497 if opts.get('style'):
497 return _debugdisplaystyle(ui)
498 return _debugdisplaystyle(ui)
498 else:
499 else:
499 return _debugdisplaycolor(ui)
500 return _debugdisplaycolor(ui)
500
501
501
502
502 def _debugdisplaycolor(ui):
503 def _debugdisplaycolor(ui):
503 ui = ui.copy()
504 ui = ui.copy()
504 ui._styles.clear()
505 ui._styles.clear()
505 for effect in color._activeeffects(ui).keys():
506 for effect in color._activeeffects(ui).keys():
506 ui._styles[effect] = effect
507 ui._styles[effect] = effect
507 if ui._terminfoparams:
508 if ui._terminfoparams:
508 for k, v in ui.configitems(b'color'):
509 for k, v in ui.configitems(b'color'):
509 if k.startswith(b'color.'):
510 if k.startswith(b'color.'):
510 ui._styles[k] = k[6:]
511 ui._styles[k] = k[6:]
511 elif k.startswith(b'terminfo.'):
512 elif k.startswith(b'terminfo.'):
512 ui._styles[k] = k[9:]
513 ui._styles[k] = k[9:]
513 ui.write(_(b'available colors:\n'))
514 ui.write(_(b'available colors:\n'))
514 # sort label with a '_' after the other to group '_background' entry.
515 # sort label with a '_' after the other to group '_background' entry.
515 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
516 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
516 for colorname, label in items:
517 for colorname, label in items:
517 ui.write(b'%s\n' % colorname, label=label)
518 ui.write(b'%s\n' % colorname, label=label)
518
519
519
520
520 def _debugdisplaystyle(ui):
521 def _debugdisplaystyle(ui):
521 ui.write(_(b'available style:\n'))
522 ui.write(_(b'available style:\n'))
522 if not ui._styles:
523 if not ui._styles:
523 return
524 return
524 width = max(len(s) for s in ui._styles)
525 width = max(len(s) for s in ui._styles)
525 for label, effects in sorted(ui._styles.items()):
526 for label, effects in sorted(ui._styles.items()):
526 ui.write(b'%s' % label, label=label)
527 ui.write(b'%s' % label, label=label)
527 if effects:
528 if effects:
528 # 50
529 # 50
529 ui.write(b': ')
530 ui.write(b': ')
530 ui.write(b' ' * (max(0, width - len(label))))
531 ui.write(b' ' * (max(0, width - len(label))))
531 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
532 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
532 ui.write(b'\n')
533 ui.write(b'\n')
533
534
534
535
535 @command(b'debugcreatestreamclonebundle', [], b'FILE')
536 @command(b'debugcreatestreamclonebundle', [], b'FILE')
536 def debugcreatestreamclonebundle(ui, repo, fname):
537 def debugcreatestreamclonebundle(ui, repo, fname):
537 """create a stream clone bundle file
538 """create a stream clone bundle file
538
539
539 Stream bundles are special bundles that are essentially archives of
540 Stream bundles are special bundles that are essentially archives of
540 revlog files. They are commonly used for cloning very quickly.
541 revlog files. They are commonly used for cloning very quickly.
541 """
542 """
542 # TODO we may want to turn this into an abort when this functionality
543 # TODO we may want to turn this into an abort when this functionality
543 # is moved into `hg bundle`.
544 # is moved into `hg bundle`.
544 if phases.hassecret(repo):
545 if phases.hassecret(repo):
545 ui.warn(
546 ui.warn(
546 _(
547 _(
547 b'(warning: stream clone bundle will contain secret '
548 b'(warning: stream clone bundle will contain secret '
548 b'revisions)\n'
549 b'revisions)\n'
549 )
550 )
550 )
551 )
551
552
552 requirements, gen = streamclone.generatebundlev1(repo)
553 requirements, gen = streamclone.generatebundlev1(repo)
553 changegroup.writechunks(ui, gen, fname)
554 changegroup.writechunks(ui, gen, fname)
554
555
555 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
556 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
556
557
557
558
558 @command(
559 @command(
559 b'debugdag',
560 b'debugdag',
560 [
561 [
561 (b't', b'tags', None, _(b'use tags as labels')),
562 (b't', b'tags', None, _(b'use tags as labels')),
562 (b'b', b'branches', None, _(b'annotate with branch names')),
563 (b'b', b'branches', None, _(b'annotate with branch names')),
563 (b'', b'dots', None, _(b'use dots for runs')),
564 (b'', b'dots', None, _(b'use dots for runs')),
564 (b's', b'spaces', None, _(b'separate elements by spaces')),
565 (b's', b'spaces', None, _(b'separate elements by spaces')),
565 ],
566 ],
566 _(b'[OPTION]... [FILE [REV]...]'),
567 _(b'[OPTION]... [FILE [REV]...]'),
567 optionalrepo=True,
568 optionalrepo=True,
568 )
569 )
569 def debugdag(ui, repo, file_=None, *revs, **opts):
570 def debugdag(ui, repo, file_=None, *revs, **opts):
570 """format the changelog or an index DAG as a concise textual description
571 """format the changelog or an index DAG as a concise textual description
571
572
572 If you pass a revlog index, the revlog's DAG is emitted. If you list
573 If you pass a revlog index, the revlog's DAG is emitted. If you list
573 revision numbers, they get labeled in the output as rN.
574 revision numbers, they get labeled in the output as rN.
574
575
575 Otherwise, the changelog DAG of the current repo is emitted.
576 Otherwise, the changelog DAG of the current repo is emitted.
576 """
577 """
577 spaces = opts.get('spaces')
578 spaces = opts.get('spaces')
578 dots = opts.get('dots')
579 dots = opts.get('dots')
579 if file_:
580 if file_:
580 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
581 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
581 revs = set((int(r) for r in revs))
582 revs = set((int(r) for r in revs))
582
583
583 def events():
584 def events():
584 for r in rlog:
585 for r in rlog:
585 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
586 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
586 if r in revs:
587 if r in revs:
587 yield b'l', (r, b"r%i" % r)
588 yield b'l', (r, b"r%i" % r)
588
589
589 elif repo:
590 elif repo:
590 cl = repo.changelog
591 cl = repo.changelog
591 tags = opts.get('tags')
592 tags = opts.get('tags')
592 branches = opts.get('branches')
593 branches = opts.get('branches')
593 if tags:
594 if tags:
594 labels = {}
595 labels = {}
595 for l, n in repo.tags().items():
596 for l, n in repo.tags().items():
596 labels.setdefault(cl.rev(n), []).append(l)
597 labels.setdefault(cl.rev(n), []).append(l)
597
598
598 def events():
599 def events():
599 b = b"default"
600 b = b"default"
600 for r in cl:
601 for r in cl:
601 if branches:
602 if branches:
602 newb = cl.read(cl.node(r))[5][b'branch']
603 newb = cl.read(cl.node(r))[5][b'branch']
603 if newb != b:
604 if newb != b:
604 yield b'a', newb
605 yield b'a', newb
605 b = newb
606 b = newb
606 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
607 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
607 if tags:
608 if tags:
608 ls = labels.get(r)
609 ls = labels.get(r)
609 if ls:
610 if ls:
610 for l in ls:
611 for l in ls:
611 yield b'l', (r, l)
612 yield b'l', (r, l)
612
613
613 else:
614 else:
614 raise error.Abort(_(b'need repo for changelog dag'))
615 raise error.Abort(_(b'need repo for changelog dag'))
615
616
616 for line in dagparser.dagtextlines(
617 for line in dagparser.dagtextlines(
617 events(),
618 events(),
618 addspaces=spaces,
619 addspaces=spaces,
619 wraplabels=True,
620 wraplabels=True,
620 wrapannotations=True,
621 wrapannotations=True,
621 wrapnonlinear=dots,
622 wrapnonlinear=dots,
622 usedots=dots,
623 usedots=dots,
623 maxlinewidth=70,
624 maxlinewidth=70,
624 ):
625 ):
625 ui.write(line)
626 ui.write(line)
626 ui.write(b"\n")
627 ui.write(b"\n")
627
628
628
629
629 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
630 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
630 def debugdata(ui, repo, file_, rev=None, **opts):
631 def debugdata(ui, repo, file_, rev=None, **opts):
631 """dump the contents of a data file revision"""
632 """dump the contents of a data file revision"""
632 opts = pycompat.byteskwargs(opts)
633 opts = pycompat.byteskwargs(opts)
633 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
634 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
634 if rev is not None:
635 if rev is not None:
635 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
636 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
636 file_, rev = None, file_
637 file_, rev = None, file_
637 elif rev is None:
638 elif rev is None:
638 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
639 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
639 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
640 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
640 try:
641 try:
641 ui.write(r.rawdata(r.lookup(rev)))
642 ui.write(r.rawdata(r.lookup(rev)))
642 except KeyError:
643 except KeyError:
643 raise error.Abort(_(b'invalid revision identifier %s') % rev)
644 raise error.Abort(_(b'invalid revision identifier %s') % rev)
644
645
645
646
646 @command(
647 @command(
647 b'debugdate',
648 b'debugdate',
648 [(b'e', b'extended', None, _(b'try extended date formats'))],
649 [(b'e', b'extended', None, _(b'try extended date formats'))],
649 _(b'[-e] DATE [RANGE]'),
650 _(b'[-e] DATE [RANGE]'),
650 norepo=True,
651 norepo=True,
651 optionalrepo=True,
652 optionalrepo=True,
652 )
653 )
653 def debugdate(ui, date, range=None, **opts):
654 def debugdate(ui, date, range=None, **opts):
654 """parse and display a date"""
655 """parse and display a date"""
655 if opts["extended"]:
656 if opts["extended"]:
656 d = dateutil.parsedate(date, dateutil.extendeddateformats)
657 d = dateutil.parsedate(date, dateutil.extendeddateformats)
657 else:
658 else:
658 d = dateutil.parsedate(date)
659 d = dateutil.parsedate(date)
659 ui.writenoi18n(b"internal: %d %d\n" % d)
660 ui.writenoi18n(b"internal: %d %d\n" % d)
660 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
661 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
661 if range:
662 if range:
662 m = dateutil.matchdate(range)
663 m = dateutil.matchdate(range)
663 ui.writenoi18n(b"match: %s\n" % m(d[0]))
664 ui.writenoi18n(b"match: %s\n" % m(d[0]))
664
665
665
666
666 @command(
667 @command(
667 b'debugdeltachain',
668 b'debugdeltachain',
668 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 cmdutil.debugrevlogopts + cmdutil.formatteropts,
669 _(b'-c|-m|FILE'),
670 _(b'-c|-m|FILE'),
670 optionalrepo=True,
671 optionalrepo=True,
671 )
672 )
672 def debugdeltachain(ui, repo, file_=None, **opts):
673 def debugdeltachain(ui, repo, file_=None, **opts):
673 """dump information about delta chains in a revlog
674 """dump information about delta chains in a revlog
674
675
675 Output can be templatized. Available template keywords are:
676 Output can be templatized. Available template keywords are:
676
677
677 :``rev``: revision number
678 :``rev``: revision number
678 :``chainid``: delta chain identifier (numbered by unique base)
679 :``chainid``: delta chain identifier (numbered by unique base)
679 :``chainlen``: delta chain length to this revision
680 :``chainlen``: delta chain length to this revision
680 :``prevrev``: previous revision in delta chain
681 :``prevrev``: previous revision in delta chain
681 :``deltatype``: role of delta / how it was computed
682 :``deltatype``: role of delta / how it was computed
682 :``compsize``: compressed size of revision
683 :``compsize``: compressed size of revision
683 :``uncompsize``: uncompressed size of revision
684 :``uncompsize``: uncompressed size of revision
684 :``chainsize``: total size of compressed revisions in chain
685 :``chainsize``: total size of compressed revisions in chain
685 :``chainratio``: total chain size divided by uncompressed revision size
686 :``chainratio``: total chain size divided by uncompressed revision size
686 (new delta chains typically start at ratio 2.00)
687 (new delta chains typically start at ratio 2.00)
687 :``lindist``: linear distance from base revision in delta chain to end
688 :``lindist``: linear distance from base revision in delta chain to end
688 of this revision
689 of this revision
689 :``extradist``: total size of revisions not part of this delta chain from
690 :``extradist``: total size of revisions not part of this delta chain from
690 base of delta chain to end of this revision; a measurement
691 base of delta chain to end of this revision; a measurement
691 of how much extra data we need to read/seek across to read
692 of how much extra data we need to read/seek across to read
692 the delta chain for this revision
693 the delta chain for this revision
693 :``extraratio``: extradist divided by chainsize; another representation of
694 :``extraratio``: extradist divided by chainsize; another representation of
694 how much unrelated data is needed to load this delta chain
695 how much unrelated data is needed to load this delta chain
695
696
696 If the repository is configured to use the sparse read, additional keywords
697 If the repository is configured to use the sparse read, additional keywords
697 are available:
698 are available:
698
699
699 :``readsize``: total size of data read from the disk for a revision
700 :``readsize``: total size of data read from the disk for a revision
700 (sum of the sizes of all the blocks)
701 (sum of the sizes of all the blocks)
701 :``largestblock``: size of the largest block of data read from the disk
702 :``largestblock``: size of the largest block of data read from the disk
702 :``readdensity``: density of useful bytes in the data read from the disk
703 :``readdensity``: density of useful bytes in the data read from the disk
703 :``srchunks``: in how many data hunks the whole revision would be read
704 :``srchunks``: in how many data hunks the whole revision would be read
704
705
705 The sparse read can be enabled with experimental.sparse-read = True
706 The sparse read can be enabled with experimental.sparse-read = True
706 """
707 """
707 opts = pycompat.byteskwargs(opts)
708 opts = pycompat.byteskwargs(opts)
708 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
709 index = r.index
710 index = r.index
710 start = r.start
711 start = r.start
711 length = r.length
712 length = r.length
712 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 generaldelta = r.version & revlog.FLAG_GENERALDELTA
713 withsparseread = getattr(r, '_withsparseread', False)
714 withsparseread = getattr(r, '_withsparseread', False)
714
715
715 def revinfo(rev):
716 def revinfo(rev):
716 e = index[rev]
717 e = index[rev]
717 compsize = e[1]
718 compsize = e[1]
718 uncompsize = e[2]
719 uncompsize = e[2]
719 chainsize = 0
720 chainsize = 0
720
721
721 if generaldelta:
722 if generaldelta:
722 if e[3] == e[5]:
723 if e[3] == e[5]:
723 deltatype = b'p1'
724 deltatype = b'p1'
724 elif e[3] == e[6]:
725 elif e[3] == e[6]:
725 deltatype = b'p2'
726 deltatype = b'p2'
726 elif e[3] == rev - 1:
727 elif e[3] == rev - 1:
727 deltatype = b'prev'
728 deltatype = b'prev'
728 elif e[3] == rev:
729 elif e[3] == rev:
729 deltatype = b'base'
730 deltatype = b'base'
730 else:
731 else:
731 deltatype = b'other'
732 deltatype = b'other'
732 else:
733 else:
733 if e[3] == rev:
734 if e[3] == rev:
734 deltatype = b'base'
735 deltatype = b'base'
735 else:
736 else:
736 deltatype = b'prev'
737 deltatype = b'prev'
737
738
738 chain = r._deltachain(rev)[0]
739 chain = r._deltachain(rev)[0]
739 for iterrev in chain:
740 for iterrev in chain:
740 e = index[iterrev]
741 e = index[iterrev]
741 chainsize += e[1]
742 chainsize += e[1]
742
743
743 return compsize, uncompsize, deltatype, chain, chainsize
744 return compsize, uncompsize, deltatype, chain, chainsize
744
745
745 fm = ui.formatter(b'debugdeltachain', opts)
746 fm = ui.formatter(b'debugdeltachain', opts)
746
747
747 fm.plain(
748 fm.plain(
748 b' rev chain# chainlen prev delta '
749 b' rev chain# chainlen prev delta '
749 b'size rawsize chainsize ratio lindist extradist '
750 b'size rawsize chainsize ratio lindist extradist '
750 b'extraratio'
751 b'extraratio'
751 )
752 )
752 if withsparseread:
753 if withsparseread:
753 fm.plain(b' readsize largestblk rddensity srchunks')
754 fm.plain(b' readsize largestblk rddensity srchunks')
754 fm.plain(b'\n')
755 fm.plain(b'\n')
755
756
756 chainbases = {}
757 chainbases = {}
757 for rev in r:
758 for rev in r:
758 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
759 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
759 chainbase = chain[0]
760 chainbase = chain[0]
760 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
761 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
761 basestart = start(chainbase)
762 basestart = start(chainbase)
762 revstart = start(rev)
763 revstart = start(rev)
763 lineardist = revstart + comp - basestart
764 lineardist = revstart + comp - basestart
764 extradist = lineardist - chainsize
765 extradist = lineardist - chainsize
765 try:
766 try:
766 prevrev = chain[-2]
767 prevrev = chain[-2]
767 except IndexError:
768 except IndexError:
768 prevrev = -1
769 prevrev = -1
769
770
770 if uncomp != 0:
771 if uncomp != 0:
771 chainratio = float(chainsize) / float(uncomp)
772 chainratio = float(chainsize) / float(uncomp)
772 else:
773 else:
773 chainratio = chainsize
774 chainratio = chainsize
774
775
775 if chainsize != 0:
776 if chainsize != 0:
776 extraratio = float(extradist) / float(chainsize)
777 extraratio = float(extradist) / float(chainsize)
777 else:
778 else:
778 extraratio = extradist
779 extraratio = extradist
779
780
780 fm.startitem()
781 fm.startitem()
781 fm.write(
782 fm.write(
782 b'rev chainid chainlen prevrev deltatype compsize '
783 b'rev chainid chainlen prevrev deltatype compsize '
783 b'uncompsize chainsize chainratio lindist extradist '
784 b'uncompsize chainsize chainratio lindist extradist '
784 b'extraratio',
785 b'extraratio',
785 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
786 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
786 rev,
787 rev,
787 chainid,
788 chainid,
788 len(chain),
789 len(chain),
789 prevrev,
790 prevrev,
790 deltatype,
791 deltatype,
791 comp,
792 comp,
792 uncomp,
793 uncomp,
793 chainsize,
794 chainsize,
794 chainratio,
795 chainratio,
795 lineardist,
796 lineardist,
796 extradist,
797 extradist,
797 extraratio,
798 extraratio,
798 rev=rev,
799 rev=rev,
799 chainid=chainid,
800 chainid=chainid,
800 chainlen=len(chain),
801 chainlen=len(chain),
801 prevrev=prevrev,
802 prevrev=prevrev,
802 deltatype=deltatype,
803 deltatype=deltatype,
803 compsize=comp,
804 compsize=comp,
804 uncompsize=uncomp,
805 uncompsize=uncomp,
805 chainsize=chainsize,
806 chainsize=chainsize,
806 chainratio=chainratio,
807 chainratio=chainratio,
807 lindist=lineardist,
808 lindist=lineardist,
808 extradist=extradist,
809 extradist=extradist,
809 extraratio=extraratio,
810 extraratio=extraratio,
810 )
811 )
811 if withsparseread:
812 if withsparseread:
812 readsize = 0
813 readsize = 0
813 largestblock = 0
814 largestblock = 0
814 srchunks = 0
815 srchunks = 0
815
816
816 for revschunk in deltautil.slicechunk(r, chain):
817 for revschunk in deltautil.slicechunk(r, chain):
817 srchunks += 1
818 srchunks += 1
818 blkend = start(revschunk[-1]) + length(revschunk[-1])
819 blkend = start(revschunk[-1]) + length(revschunk[-1])
819 blksize = blkend - start(revschunk[0])
820 blksize = blkend - start(revschunk[0])
820
821
821 readsize += blksize
822 readsize += blksize
822 if largestblock < blksize:
823 if largestblock < blksize:
823 largestblock = blksize
824 largestblock = blksize
824
825
825 if readsize:
826 if readsize:
826 readdensity = float(chainsize) / float(readsize)
827 readdensity = float(chainsize) / float(readsize)
827 else:
828 else:
828 readdensity = 1
829 readdensity = 1
829
830
830 fm.write(
831 fm.write(
831 b'readsize largestblock readdensity srchunks',
832 b'readsize largestblock readdensity srchunks',
832 b' %10d %10d %9.5f %8d',
833 b' %10d %10d %9.5f %8d',
833 readsize,
834 readsize,
834 largestblock,
835 largestblock,
835 readdensity,
836 readdensity,
836 srchunks,
837 srchunks,
837 readsize=readsize,
838 readsize=readsize,
838 largestblock=largestblock,
839 largestblock=largestblock,
839 readdensity=readdensity,
840 readdensity=readdensity,
840 srchunks=srchunks,
841 srchunks=srchunks,
841 )
842 )
842
843
843 fm.plain(b'\n')
844 fm.plain(b'\n')
844
845
845 fm.end()
846 fm.end()
846
847
847
848
848 @command(
849 @command(
849 b'debugdirstate|debugstate',
850 b'debugdirstate|debugstate',
850 [
851 [
851 (
852 (
852 b'',
853 b'',
853 b'nodates',
854 b'nodates',
854 None,
855 None,
855 _(b'do not display the saved mtime (DEPRECATED)'),
856 _(b'do not display the saved mtime (DEPRECATED)'),
856 ),
857 ),
857 (b'', b'dates', True, _(b'display the saved mtime')),
858 (b'', b'dates', True, _(b'display the saved mtime')),
858 (b'', b'datesort', None, _(b'sort by saved mtime')),
859 (b'', b'datesort', None, _(b'sort by saved mtime')),
859 ],
860 ],
860 _(b'[OPTION]...'),
861 _(b'[OPTION]...'),
861 )
862 )
862 def debugstate(ui, repo, **opts):
863 def debugstate(ui, repo, **opts):
863 """show the contents of the current dirstate"""
864 """show the contents of the current dirstate"""
864
865
865 nodates = not opts['dates']
866 nodates = not opts['dates']
866 if opts.get('nodates') is not None:
867 if opts.get('nodates') is not None:
867 nodates = True
868 nodates = True
868 datesort = opts.get('datesort')
869 datesort = opts.get('datesort')
869
870
870 if datesort:
871 if datesort:
871 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
872 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
872 else:
873 else:
873 keyfunc = None # sort by filename
874 keyfunc = None # sort by filename
874 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
875 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
875 if ent[3] == -1:
876 if ent[3] == -1:
876 timestr = b'unset '
877 timestr = b'unset '
877 elif nodates:
878 elif nodates:
878 timestr = b'set '
879 timestr = b'set '
879 else:
880 else:
880 timestr = time.strftime(
881 timestr = time.strftime(
881 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
882 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
882 )
883 )
883 timestr = encoding.strtolocal(timestr)
884 timestr = encoding.strtolocal(timestr)
884 if ent[1] & 0o20000:
885 if ent[1] & 0o20000:
885 mode = b'lnk'
886 mode = b'lnk'
886 else:
887 else:
887 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
888 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
888 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
889 for f in repo.dirstate.copies():
890 for f in repo.dirstate.copies():
890 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891
892
892
893
893 @command(
894 @command(
894 b'debugdiscovery',
895 b'debugdiscovery',
895 [
896 [
896 (b'', b'old', None, _(b'use old-style discovery')),
897 (b'', b'old', None, _(b'use old-style discovery')),
897 (
898 (
898 b'',
899 b'',
899 b'nonheads',
900 b'nonheads',
900 None,
901 None,
901 _(b'use old-style discovery with non-heads included'),
902 _(b'use old-style discovery with non-heads included'),
902 ),
903 ),
903 (b'', b'rev', [], b'restrict discovery to this set of revs'),
904 (b'', b'rev', [], b'restrict discovery to this set of revs'),
904 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
905 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
905 ]
906 ]
906 + cmdutil.remoteopts,
907 + cmdutil.remoteopts,
907 _(b'[--rev REV] [OTHER]'),
908 _(b'[--rev REV] [OTHER]'),
908 )
909 )
909 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
910 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
910 """runs the changeset discovery protocol in isolation"""
911 """runs the changeset discovery protocol in isolation"""
911 opts = pycompat.byteskwargs(opts)
912 opts = pycompat.byteskwargs(opts)
912 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
913 remote = hg.peer(repo, opts, remoteurl)
914 remote = hg.peer(repo, opts, remoteurl)
914 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
915
916
916 # make sure tests are repeatable
917 # make sure tests are repeatable
917 random.seed(int(opts[b'seed']))
918 random.seed(int(opts[b'seed']))
918
919
919 if opts.get(b'old'):
920 if opts.get(b'old'):
920
921
921 def doit(pushedrevs, remoteheads, remote=remote):
922 def doit(pushedrevs, remoteheads, remote=remote):
922 if not util.safehasattr(remote, b'branches'):
923 if not util.safehasattr(remote, b'branches'):
923 # enable in-client legacy support
924 # enable in-client legacy support
924 remote = localrepo.locallegacypeer(remote.local())
925 remote = localrepo.locallegacypeer(remote.local())
925 common, _in, hds = treediscovery.findcommonincoming(
926 common, _in, hds = treediscovery.findcommonincoming(
926 repo, remote, force=True
927 repo, remote, force=True
927 )
928 )
928 common = set(common)
929 common = set(common)
929 if not opts.get(b'nonheads'):
930 if not opts.get(b'nonheads'):
930 ui.writenoi18n(
931 ui.writenoi18n(
931 b"unpruned common: %s\n"
932 b"unpruned common: %s\n"
932 % b" ".join(sorted(short(n) for n in common))
933 % b" ".join(sorted(short(n) for n in common))
933 )
934 )
934
935
935 clnode = repo.changelog.node
936 clnode = repo.changelog.node
936 common = repo.revs(b'heads(::%ln)', common)
937 common = repo.revs(b'heads(::%ln)', common)
937 common = {clnode(r) for r in common}
938 common = {clnode(r) for r in common}
938 return common, hds
939 return common, hds
939
940
940 else:
941 else:
941
942
942 def doit(pushedrevs, remoteheads, remote=remote):
943 def doit(pushedrevs, remoteheads, remote=remote):
943 nodes = None
944 nodes = None
944 if pushedrevs:
945 if pushedrevs:
945 revs = scmutil.revrange(repo, pushedrevs)
946 revs = scmutil.revrange(repo, pushedrevs)
946 nodes = [repo[r].node() for r in revs]
947 nodes = [repo[r].node() for r in revs]
947 common, any, hds = setdiscovery.findcommonheads(
948 common, any, hds = setdiscovery.findcommonheads(
948 ui, repo, remote, ancestorsof=nodes
949 ui, repo, remote, ancestorsof=nodes
949 )
950 )
950 return common, hds
951 return common, hds
951
952
952 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
953 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
953 localrevs = opts[b'rev']
954 localrevs = opts[b'rev']
954 with util.timedcm('debug-discovery') as t:
955 with util.timedcm('debug-discovery') as t:
955 common, hds = doit(localrevs, remoterevs)
956 common, hds = doit(localrevs, remoterevs)
956
957
957 # compute all statistics
958 # compute all statistics
958 common = set(common)
959 common = set(common)
959 rheads = set(hds)
960 rheads = set(hds)
960 lheads = set(repo.heads())
961 lheads = set(repo.heads())
961
962
962 data = {}
963 data = {}
963 data[b'elapsed'] = t.elapsed
964 data[b'elapsed'] = t.elapsed
964 data[b'nb-common'] = len(common)
965 data[b'nb-common'] = len(common)
965 data[b'nb-common-local'] = len(common & lheads)
966 data[b'nb-common-local'] = len(common & lheads)
966 data[b'nb-common-remote'] = len(common & rheads)
967 data[b'nb-common-remote'] = len(common & rheads)
967 data[b'nb-common-both'] = len(common & rheads & lheads)
968 data[b'nb-common-both'] = len(common & rheads & lheads)
968 data[b'nb-local'] = len(lheads)
969 data[b'nb-local'] = len(lheads)
969 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
970 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
970 data[b'nb-remote'] = len(rheads)
971 data[b'nb-remote'] = len(rheads)
971 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
972 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
972 data[b'nb-revs'] = len(repo.revs(b'all()'))
973 data[b'nb-revs'] = len(repo.revs(b'all()'))
973 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
974 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
974 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
975 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
975
976
976 # display discovery summary
977 # display discovery summary
977 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
978 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
978 ui.writenoi18n(b"heads summary:\n")
979 ui.writenoi18n(b"heads summary:\n")
979 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
980 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
980 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
981 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
981 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
982 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
982 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
983 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
983 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
984 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
984 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
985 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
985 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
986 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
986 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
987 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
987 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
988 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
988 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
989 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
989 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
990 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
990 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
991 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
991 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
992 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
992
993
993 if ui.verbose:
994 if ui.verbose:
994 ui.writenoi18n(
995 ui.writenoi18n(
995 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
996 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
996 )
997 )
997
998
998
999
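# A minimal sketch of the head-summary arithmetic printed above, assuming
# hypothetical head sets (the real values come from the discovery run itself):
#   common = {a, b}; local heads = {a, c}; remote heads = {b, d}
#   nb-common-local   = |common & local heads|         = 1         (head "a")
#   nb-local-missing  = nb-local  - nb-common-local    = 2 - 1 = 1 (head "c")
#   nb-remote-unknown = nb-remote - nb-common-remote   = 2 - 1 = 1 (head "d")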
999 _chunksize = 4 << 10
1000 _chunksize = 4 << 10
1000
1001
1001
1002
1002 @command(
1003 @command(
1003 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1004 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1004 )
1005 )
1005 def debugdownload(ui, repo, url, output=None, **opts):
1006 def debugdownload(ui, repo, url, output=None, **opts):
1006 """download a resource using Mercurial logic and config
1007 """download a resource using Mercurial logic and config
1007 """
1008 """
1008 fh = urlmod.open(ui, url, output)
1009 fh = urlmod.open(ui, url, output)
1009
1010
1010 dest = ui
1011 dest = ui
1011 if output:
1012 if output:
1012 dest = open(output, b"wb", _chunksize)
1013 dest = open(output, b"wb", _chunksize)
1013 try:
1014 try:
1014 data = fh.read(_chunksize)
1015 data = fh.read(_chunksize)
1015 while data:
1016 while data:
1016 dest.write(data)
1017 dest.write(data)
1017 data = fh.read(_chunksize)
1018 data = fh.read(_chunksize)
1018 finally:
1019 finally:
1019 if output:
1020 if output:
1020 dest.close()
1021 dest.close()
1021
1022
1022
1023
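# A typical invocation of the command above, with a hypothetical URL and output
# file name:
#   hg debugdownload https://example.com/somefile -o local-copy
# When --output is omitted, the downloaded bytes are written to the ui
# (standard output) in _chunksize chunks instead of to a file.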
1023 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1024 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1024 def debugextensions(ui, repo, **opts):
1025 def debugextensions(ui, repo, **opts):
1025 '''show information about active extensions'''
1026 '''show information about active extensions'''
1026 opts = pycompat.byteskwargs(opts)
1027 opts = pycompat.byteskwargs(opts)
1027 exts = extensions.extensions(ui)
1028 exts = extensions.extensions(ui)
1028 hgver = util.version()
1029 hgver = util.version()
1029 fm = ui.formatter(b'debugextensions', opts)
1030 fm = ui.formatter(b'debugextensions', opts)
1030 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1031 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1031 isinternal = extensions.ismoduleinternal(extmod)
1032 isinternal = extensions.ismoduleinternal(extmod)
1032 extsource = None
1033 extsource = None
1033
1034
1034 if util.safehasattr(extmod, '__file__'):
1035 if util.safehasattr(extmod, '__file__'):
1035 extsource = pycompat.fsencode(extmod.__file__)
1036 extsource = pycompat.fsencode(extmod.__file__)
1036 elif getattr(sys, 'oxidized', False):
1037 elif getattr(sys, 'oxidized', False):
1037 extsource = pycompat.sysexecutable
1038 extsource = pycompat.sysexecutable
1038 if isinternal:
1039 if isinternal:
1039 exttestedwith = [] # never expose magic string to users
1040 exttestedwith = [] # never expose magic string to users
1040 else:
1041 else:
1041 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1042 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1042 extbuglink = getattr(extmod, 'buglink', None)
1043 extbuglink = getattr(extmod, 'buglink', None)
1043
1044
1044 fm.startitem()
1045 fm.startitem()
1045
1046
1046 if ui.quiet or ui.verbose:
1047 if ui.quiet or ui.verbose:
1047 fm.write(b'name', b'%s\n', extname)
1048 fm.write(b'name', b'%s\n', extname)
1048 else:
1049 else:
1049 fm.write(b'name', b'%s', extname)
1050 fm.write(b'name', b'%s', extname)
1050 if isinternal or hgver in exttestedwith:
1051 if isinternal or hgver in exttestedwith:
1051 fm.plain(b'\n')
1052 fm.plain(b'\n')
1052 elif not exttestedwith:
1053 elif not exttestedwith:
1053 fm.plain(_(b' (untested!)\n'))
1054 fm.plain(_(b' (untested!)\n'))
1054 else:
1055 else:
1055 lasttestedversion = exttestedwith[-1]
1056 lasttestedversion = exttestedwith[-1]
1056 fm.plain(b' (%s!)\n' % lasttestedversion)
1057 fm.plain(b' (%s!)\n' % lasttestedversion)
1057
1058
1058 fm.condwrite(
1059 fm.condwrite(
1059 ui.verbose and extsource,
1060 ui.verbose and extsource,
1060 b'source',
1061 b'source',
1061 _(b' location: %s\n'),
1062 _(b' location: %s\n'),
1062 extsource or b"",
1063 extsource or b"",
1063 )
1064 )
1064
1065
1065 if ui.verbose:
1066 if ui.verbose:
1066 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1067 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1067 fm.data(bundled=isinternal)
1068 fm.data(bundled=isinternal)
1068
1069
1069 fm.condwrite(
1070 fm.condwrite(
1070 ui.verbose and exttestedwith,
1071 ui.verbose and exttestedwith,
1071 b'testedwith',
1072 b'testedwith',
1072 _(b' tested with: %s\n'),
1073 _(b' tested with: %s\n'),
1073 fm.formatlist(exttestedwith, name=b'ver'),
1074 fm.formatlist(exttestedwith, name=b'ver'),
1074 )
1075 )
1075
1076
1076 fm.condwrite(
1077 fm.condwrite(
1077 ui.verbose and extbuglink,
1078 ui.verbose and extbuglink,
1078 b'buglink',
1079 b'buglink',
1079 _(b' bug reporting: %s\n'),
1080 _(b' bug reporting: %s\n'),
1080 extbuglink or b"",
1081 extbuglink or b"",
1081 )
1082 )
1082
1083
1083 fm.end()
1084 fm.end()
1084
1085
1085
1086
1086 @command(
1087 @command(
1087 b'debugfileset',
1088 b'debugfileset',
1088 [
1089 [
1089 (
1090 (
1090 b'r',
1091 b'r',
1091 b'rev',
1092 b'rev',
1092 b'',
1093 b'',
1093 _(b'apply the filespec on this revision'),
1094 _(b'apply the filespec on this revision'),
1094 _(b'REV'),
1095 _(b'REV'),
1095 ),
1096 ),
1096 (
1097 (
1097 b'',
1098 b'',
1098 b'all-files',
1099 b'all-files',
1099 False,
1100 False,
1100 _(b'test files from all revisions and working directory'),
1101 _(b'test files from all revisions and working directory'),
1101 ),
1102 ),
1102 (
1103 (
1103 b's',
1104 b's',
1104 b'show-matcher',
1105 b'show-matcher',
1105 None,
1106 None,
1106 _(b'print internal representation of matcher'),
1107 _(b'print internal representation of matcher'),
1107 ),
1108 ),
1108 (
1109 (
1109 b'p',
1110 b'p',
1110 b'show-stage',
1111 b'show-stage',
1111 [],
1112 [],
1112 _(b'print parsed tree at the given stage'),
1113 _(b'print parsed tree at the given stage'),
1113 _(b'NAME'),
1114 _(b'NAME'),
1114 ),
1115 ),
1115 ],
1116 ],
1116 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1117 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1117 )
1118 )
1118 def debugfileset(ui, repo, expr, **opts):
1119 def debugfileset(ui, repo, expr, **opts):
1119 '''parse and apply a fileset specification'''
1120 '''parse and apply a fileset specification'''
1120 from . import fileset
1121 from . import fileset
1121
1122
1122 fileset.symbols # force import of fileset so we have predicates to optimize
1123 fileset.symbols # force import of fileset so we have predicates to optimize
1123 opts = pycompat.byteskwargs(opts)
1124 opts = pycompat.byteskwargs(opts)
1124 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1125 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1125
1126
1126 stages = [
1127 stages = [
1127 (b'parsed', pycompat.identity),
1128 (b'parsed', pycompat.identity),
1128 (b'analyzed', filesetlang.analyze),
1129 (b'analyzed', filesetlang.analyze),
1129 (b'optimized', filesetlang.optimize),
1130 (b'optimized', filesetlang.optimize),
1130 ]
1131 ]
1131 stagenames = set(n for n, f in stages)
1132 stagenames = set(n for n, f in stages)
1132
1133
1133 showalways = set()
1134 showalways = set()
1134 if ui.verbose and not opts[b'show_stage']:
1135 if ui.verbose and not opts[b'show_stage']:
1135 # show parsed tree by --verbose (deprecated)
1136 # show parsed tree by --verbose (deprecated)
1136 showalways.add(b'parsed')
1137 showalways.add(b'parsed')
1137 if opts[b'show_stage'] == [b'all']:
1138 if opts[b'show_stage'] == [b'all']:
1138 showalways.update(stagenames)
1139 showalways.update(stagenames)
1139 else:
1140 else:
1140 for n in opts[b'show_stage']:
1141 for n in opts[b'show_stage']:
1141 if n not in stagenames:
1142 if n not in stagenames:
1142 raise error.Abort(_(b'invalid stage name: %s') % n)
1143 raise error.Abort(_(b'invalid stage name: %s') % n)
1143 showalways.update(opts[b'show_stage'])
1144 showalways.update(opts[b'show_stage'])
1144
1145
1145 tree = filesetlang.parse(expr)
1146 tree = filesetlang.parse(expr)
1146 for n, f in stages:
1147 for n, f in stages:
1147 tree = f(tree)
1148 tree = f(tree)
1148 if n in showalways:
1149 if n in showalways:
1149 if opts[b'show_stage'] or n != b'parsed':
1150 if opts[b'show_stage'] or n != b'parsed':
1150 ui.write(b"* %s:\n" % n)
1151 ui.write(b"* %s:\n" % n)
1151 ui.write(filesetlang.prettyformat(tree), b"\n")
1152 ui.write(filesetlang.prettyformat(tree), b"\n")
1152
1153
1153 files = set()
1154 files = set()
1154 if opts[b'all_files']:
1155 if opts[b'all_files']:
1155 for r in repo:
1156 for r in repo:
1156 c = repo[r]
1157 c = repo[r]
1157 files.update(c.files())
1158 files.update(c.files())
1158 files.update(c.substate)
1159 files.update(c.substate)
1159 if opts[b'all_files'] or ctx.rev() is None:
1160 if opts[b'all_files'] or ctx.rev() is None:
1160 wctx = repo[None]
1161 wctx = repo[None]
1161 files.update(
1162 files.update(
1162 repo.dirstate.walk(
1163 repo.dirstate.walk(
1163 scmutil.matchall(repo),
1164 scmutil.matchall(repo),
1164 subrepos=list(wctx.substate),
1165 subrepos=list(wctx.substate),
1165 unknown=True,
1166 unknown=True,
1166 ignored=True,
1167 ignored=True,
1167 )
1168 )
1168 )
1169 )
1169 files.update(wctx.substate)
1170 files.update(wctx.substate)
1170 else:
1171 else:
1171 files.update(ctx.files())
1172 files.update(ctx.files())
1172 files.update(ctx.substate)
1173 files.update(ctx.substate)
1173
1174
1174 m = ctx.matchfileset(repo.getcwd(), expr)
1175 m = ctx.matchfileset(repo.getcwd(), expr)
1175 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1176 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1176 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1177 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1177 for f in sorted(files):
1178 for f in sorted(files):
1178 if not m(f):
1179 if not m(f):
1179 continue
1180 continue
1180 ui.write(b"%s\n" % f)
1181 ui.write(b"%s\n" % f)
1181
1182
1182
1183
1183 @command(b'debugformat', [] + cmdutil.formatteropts)
1184 @command(b'debugformat', [] + cmdutil.formatteropts)
1184 def debugformat(ui, repo, **opts):
1185 def debugformat(ui, repo, **opts):
1185 """display format information about the current repository
1186 """display format information about the current repository
1186
1187
1187 Use --verbose to get extra information about current config value and
1188 Use --verbose to get extra information about current config value and
1188 Mercurial default."""
1189 Mercurial default."""
1189 opts = pycompat.byteskwargs(opts)
1190 opts = pycompat.byteskwargs(opts)
1190 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1191 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1191 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1192 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1192
1193
1193 def makeformatname(name):
1194 def makeformatname(name):
1194 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1195 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1195
1196
1196 fm = ui.formatter(b'debugformat', opts)
1197 fm = ui.formatter(b'debugformat', opts)
1197 if fm.isplain():
1198 if fm.isplain():
1198
1199
1199 def formatvalue(value):
1200 def formatvalue(value):
1200 if util.safehasattr(value, b'startswith'):
1201 if util.safehasattr(value, b'startswith'):
1201 return value
1202 return value
1202 if value:
1203 if value:
1203 return b'yes'
1204 return b'yes'
1204 else:
1205 else:
1205 return b'no'
1206 return b'no'
1206
1207
1207 else:
1208 else:
1208 formatvalue = pycompat.identity
1209 formatvalue = pycompat.identity
1209
1210
1210 fm.plain(b'format-variant')
1211 fm.plain(b'format-variant')
1211 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1212 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1212 fm.plain(b' repo')
1213 fm.plain(b' repo')
1213 if ui.verbose:
1214 if ui.verbose:
1214 fm.plain(b' config default')
1215 fm.plain(b' config default')
1215 fm.plain(b'\n')
1216 fm.plain(b'\n')
1216 for fv in upgrade.allformatvariant:
1217 for fv in upgrade.allformatvariant:
1217 fm.startitem()
1218 fm.startitem()
1218 repovalue = fv.fromrepo(repo)
1219 repovalue = fv.fromrepo(repo)
1219 configvalue = fv.fromconfig(repo)
1220 configvalue = fv.fromconfig(repo)
1220
1221
1221 if repovalue != configvalue:
1222 if repovalue != configvalue:
1222 namelabel = b'formatvariant.name.mismatchconfig'
1223 namelabel = b'formatvariant.name.mismatchconfig'
1223 repolabel = b'formatvariant.repo.mismatchconfig'
1224 repolabel = b'formatvariant.repo.mismatchconfig'
1224 elif repovalue != fv.default:
1225 elif repovalue != fv.default:
1225 namelabel = b'formatvariant.name.mismatchdefault'
1226 namelabel = b'formatvariant.name.mismatchdefault'
1226 repolabel = b'formatvariant.repo.mismatchdefault'
1227 repolabel = b'formatvariant.repo.mismatchdefault'
1227 else:
1228 else:
1228 namelabel = b'formatvariant.name.uptodate'
1229 namelabel = b'formatvariant.name.uptodate'
1229 repolabel = b'formatvariant.repo.uptodate'
1230 repolabel = b'formatvariant.repo.uptodate'
1230
1231
1231 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1232 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1232 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1233 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1233 if fv.default != configvalue:
1234 if fv.default != configvalue:
1234 configlabel = b'formatvariant.config.special'
1235 configlabel = b'formatvariant.config.special'
1235 else:
1236 else:
1236 configlabel = b'formatvariant.config.default'
1237 configlabel = b'formatvariant.config.default'
1237 fm.condwrite(
1238 fm.condwrite(
1238 ui.verbose,
1239 ui.verbose,
1239 b'config',
1240 b'config',
1240 b' %6s',
1241 b' %6s',
1241 formatvalue(configvalue),
1242 formatvalue(configvalue),
1242 label=configlabel,
1243 label=configlabel,
1243 )
1244 )
1244 fm.condwrite(
1245 fm.condwrite(
1245 ui.verbose,
1246 ui.verbose,
1246 b'default',
1247 b'default',
1247 b' %7s',
1248 b' %7s',
1248 formatvalue(fv.default),
1249 formatvalue(fv.default),
1249 label=b'formatvariant.default',
1250 label=b'formatvariant.default',
1250 )
1251 )
1251 fm.plain(b'\n')
1252 fm.plain(b'\n')
1252 fm.end()
1253 fm.end()
1253
1254
1254
1255
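# Plain (non --verbose) output of the loop above is a two-column table; on a
# hypothetical repository with modern defaults it looks roughly like:
#   format-variant repo
#   fncache:        yes
#   dotencode:      yes
#   generaldelta:   yes
#   sparserevlog:   yes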
1255 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1256 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1256 def debugfsinfo(ui, path=b"."):
1257 def debugfsinfo(ui, path=b"."):
1257 """show information detected about current filesystem"""
1258 """show information detected about current filesystem"""
1258 ui.writenoi18n(b'path: %s\n' % path)
1259 ui.writenoi18n(b'path: %s\n' % path)
1259 ui.writenoi18n(
1260 ui.writenoi18n(
1260 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1261 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1261 )
1262 )
1262 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1263 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1263 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1264 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1264 ui.writenoi18n(
1265 ui.writenoi18n(
1265 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1266 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1266 )
1267 )
1267 ui.writenoi18n(
1268 ui.writenoi18n(
1268 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1269 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1269 )
1270 )
1270 casesensitive = b'(unknown)'
1271 casesensitive = b'(unknown)'
1271 try:
1272 try:
1272 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1273 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1273 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1274 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1274 except OSError:
1275 except OSError:
1275 pass
1276 pass
1276 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1277 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1277
1278
1278
1279
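# Sample debugfsinfo output for a hypothetical Linux/ext4 working copy (actual
# values depend on the platform and filesystem):
#   path: .
#   mounted on: /
#   exec: yes
#   fstype: ext4
#   symlink: yes
#   hardlink: yes
#   case-sensitive: yes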
1279 @command(
1280 @command(
1280 b'debuggetbundle',
1281 b'debuggetbundle',
1281 [
1282 [
1282 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1283 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1283 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1284 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1284 (
1285 (
1285 b't',
1286 b't',
1286 b'type',
1287 b'type',
1287 b'bzip2',
1288 b'bzip2',
1288 _(b'bundle compression type to use'),
1289 _(b'bundle compression type to use'),
1289 _(b'TYPE'),
1290 _(b'TYPE'),
1290 ),
1291 ),
1291 ],
1292 ],
1292 _(b'REPO FILE [-H|-C ID]...'),
1293 _(b'REPO FILE [-H|-C ID]...'),
1293 norepo=True,
1294 norepo=True,
1294 )
1295 )
1295 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1296 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1296 """retrieves a bundle from a repo
1297 """retrieves a bundle from a repo
1297
1298
1298 Every ID must be a full-length hex node id string. Saves the bundle to the
1299 Every ID must be a full-length hex node id string. Saves the bundle to the
1299 given file.
1300 given file.
1300 """
1301 """
1301 opts = pycompat.byteskwargs(opts)
1302 opts = pycompat.byteskwargs(opts)
1302 repo = hg.peer(ui, opts, repopath)
1303 repo = hg.peer(ui, opts, repopath)
1303 if not repo.capable(b'getbundle'):
1304 if not repo.capable(b'getbundle'):
1304 raise error.Abort(b"getbundle() not supported by target repository")
1305 raise error.Abort(b"getbundle() not supported by target repository")
1305 args = {}
1306 args = {}
1306 if common:
1307 if common:
1307 args['common'] = [bin(s) for s in common]
1308 args['common'] = [bin(s) for s in common]
1308 if head:
1309 if head:
1309 args['heads'] = [bin(s) for s in head]
1310 args['heads'] = [bin(s) for s in head]
1310 # TODO: get desired bundlecaps from command line.
1311 # TODO: get desired bundlecaps from command line.
1311 args['bundlecaps'] = None
1312 args['bundlecaps'] = None
1312 bundle = repo.getbundle(b'debug', **args)
1313 bundle = repo.getbundle(b'debug', **args)
1313
1314
1314 bundletype = opts.get(b'type', b'bzip2').lower()
1315 bundletype = opts.get(b'type', b'bzip2').lower()
1315 btypes = {
1316 btypes = {
1316 b'none': b'HG10UN',
1317 b'none': b'HG10UN',
1317 b'bzip2': b'HG10BZ',
1318 b'bzip2': b'HG10BZ',
1318 b'gzip': b'HG10GZ',
1319 b'gzip': b'HG10GZ',
1319 b'bundle2': b'HG20',
1320 b'bundle2': b'HG20',
1320 }
1321 }
1321 bundletype = btypes.get(bundletype)
1322 bundletype = btypes.get(bundletype)
1322 if bundletype not in bundle2.bundletypes:
1323 if bundletype not in bundle2.bundletypes:
1323 raise error.Abort(_(b'unknown bundle type specified with --type'))
1324 raise error.Abort(_(b'unknown bundle type specified with --type'))
1324 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1325 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1325
1326
1326
1327
1327 @command(b'debugignore', [], b'[FILE]')
1328 @command(b'debugignore', [], b'[FILE]')
1328 def debugignore(ui, repo, *files, **opts):
1329 def debugignore(ui, repo, *files, **opts):
1329 """display the combined ignore pattern and information about ignored files
1330 """display the combined ignore pattern and information about ignored files
1330
1331
1331 With no argument display the combined ignore pattern.
1332 With no argument display the combined ignore pattern.
1332
1333
1333 Given space separated file names, show whether each given file is ignored
1334 Given space separated file names, show whether each given file is ignored
1334 and, if so, the ignore rule (file and line number) that matched it.
1335 and, if so, the ignore rule (file and line number) that matched it.
1335 """
1336 """
1336 ignore = repo.dirstate._ignore
1337 ignore = repo.dirstate._ignore
1337 if not files:
1338 if not files:
1338 # Show all the patterns
1339 # Show all the patterns
1339 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1340 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1340 else:
1341 else:
1341 m = scmutil.match(repo[None], pats=files)
1342 m = scmutil.match(repo[None], pats=files)
1342 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1343 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1343 for f in m.files():
1344 for f in m.files():
1344 nf = util.normpath(f)
1345 nf = util.normpath(f)
1345 ignored = None
1346 ignored = None
1346 ignoredata = None
1347 ignoredata = None
1347 if nf != b'.':
1348 if nf != b'.':
1348 if ignore(nf):
1349 if ignore(nf):
1349 ignored = nf
1350 ignored = nf
1350 ignoredata = repo.dirstate._ignorefileandline(nf)
1351 ignoredata = repo.dirstate._ignorefileandline(nf)
1351 else:
1352 else:
1352 for p in pathutil.finddirs(nf):
1353 for p in pathutil.finddirs(nf):
1353 if ignore(p):
1354 if ignore(p):
1354 ignored = p
1355 ignored = p
1355 ignoredata = repo.dirstate._ignorefileandline(p)
1356 ignoredata = repo.dirstate._ignorefileandline(p)
1356 break
1357 break
1357 if ignored:
1358 if ignored:
1358 if ignored == nf:
1359 if ignored == nf:
1359 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1360 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1360 else:
1361 else:
1361 ui.write(
1362 ui.write(
1362 _(
1363 _(
1363 b"%s is ignored because of "
1364 b"%s is ignored because of "
1364 b"containing directory %s\n"
1365 b"containing directory %s\n"
1365 )
1366 )
1366 % (uipathfn(f), ignored)
1367 % (uipathfn(f), ignored)
1367 )
1368 )
1368 ignorefile, lineno, line = ignoredata
1369 ignorefile, lineno, line = ignoredata
1369 ui.write(
1370 ui.write(
1370 _(b"(ignore rule in %s, line %d: '%s')\n")
1371 _(b"(ignore rule in %s, line %d: '%s')\n")
1371 % (ignorefile, lineno, line)
1372 % (ignorefile, lineno, line)
1372 )
1373 )
1373 else:
1374 else:
1374 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1375 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1375
1376
1376
1377
1377 @command(
1378 @command(
1378 b'debugindex',
1379 b'debugindex',
1379 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1380 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1380 _(b'-c|-m|FILE'),
1381 _(b'-c|-m|FILE'),
1381 )
1382 )
1382 def debugindex(ui, repo, file_=None, **opts):
1383 def debugindex(ui, repo, file_=None, **opts):
1383 """dump index data for a storage primitive"""
1384 """dump index data for a storage primitive"""
1384 opts = pycompat.byteskwargs(opts)
1385 opts = pycompat.byteskwargs(opts)
1385 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1386 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1386
1387
1387 if ui.debugflag:
1388 if ui.debugflag:
1388 shortfn = hex
1389 shortfn = hex
1389 else:
1390 else:
1390 shortfn = short
1391 shortfn = short
1391
1392
1392 idlen = 12
1393 idlen = 12
1393 for i in store:
1394 for i in store:
1394 idlen = len(shortfn(store.node(i)))
1395 idlen = len(shortfn(store.node(i)))
1395 break
1396 break
1396
1397
1397 fm = ui.formatter(b'debugindex', opts)
1398 fm = ui.formatter(b'debugindex', opts)
1398 fm.plain(
1399 fm.plain(
1399 b' rev linkrev %s %s p2\n'
1400 b' rev linkrev %s %s p2\n'
1400 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1401 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1401 )
1402 )
1402
1403
1403 for rev in store:
1404 for rev in store:
1404 node = store.node(rev)
1405 node = store.node(rev)
1405 parents = store.parents(node)
1406 parents = store.parents(node)
1406
1407
1407 fm.startitem()
1408 fm.startitem()
1408 fm.write(b'rev', b'%6d ', rev)
1409 fm.write(b'rev', b'%6d ', rev)
1409 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1410 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1410 fm.write(b'node', b'%s ', shortfn(node))
1411 fm.write(b'node', b'%s ', shortfn(node))
1411 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1412 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1412 fm.write(b'p2', b'%s', shortfn(parents[1]))
1413 fm.write(b'p2', b'%s', shortfn(parents[1]))
1413 fm.plain(b'\n')
1414 fm.plain(b'\n')
1414
1415
1415 fm.end()
1416 fm.end()
1416
1417
1417
1418
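# The formatter above emits a fixed-width table; for a hypothetical two-revision
# file log (node ids invented for illustration) it looks roughly like:
#      rev linkrev nodeid       p1           p2
#        0       0 1111111111aa 000000000000 000000000000
#        1       1 2222222222bb 1111111111aa 000000000000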
1418 @command(
1419 @command(
1419 b'debugindexdot',
1420 b'debugindexdot',
1420 cmdutil.debugrevlogopts,
1421 cmdutil.debugrevlogopts,
1421 _(b'-c|-m|FILE'),
1422 _(b'-c|-m|FILE'),
1422 optionalrepo=True,
1423 optionalrepo=True,
1423 )
1424 )
1424 def debugindexdot(ui, repo, file_=None, **opts):
1425 def debugindexdot(ui, repo, file_=None, **opts):
1425 """dump an index DAG as a graphviz dot file"""
1426 """dump an index DAG as a graphviz dot file"""
1426 opts = pycompat.byteskwargs(opts)
1427 opts = pycompat.byteskwargs(opts)
1427 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1428 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1428 ui.writenoi18n(b"digraph G {\n")
1429 ui.writenoi18n(b"digraph G {\n")
1429 for i in r:
1430 for i in r:
1430 node = r.node(i)
1431 node = r.node(i)
1431 pp = r.parents(node)
1432 pp = r.parents(node)
1432 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1433 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1433 if pp[1] != nullid:
1434 if pp[1] != nullid:
1434 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1435 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1435 ui.write(b"}\n")
1436 ui.write(b"}\n")
1436
1437
1437
1438
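# For a hypothetical three-revision index where rev 1 descends from rev 0 and
# rev 2 merges revs 0 and 1, the loop above emits (the "-1" edge comes from the
# null parent of the root revision):
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       0 -> 2
#       1 -> 2
#   }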
1438 @command(b'debugindexstats', [])
1439 @command(b'debugindexstats', [])
1439 def debugindexstats(ui, repo):
1440 def debugindexstats(ui, repo):
1440 """show stats related to the changelog index"""
1441 """show stats related to the changelog index"""
1441 repo.changelog.shortest(nullid, 1)
1442 repo.changelog.shortest(nullid, 1)
1442 index = repo.changelog.index
1443 index = repo.changelog.index
1443 if not util.safehasattr(index, b'stats'):
1444 if not util.safehasattr(index, b'stats'):
1444 raise error.Abort(_(b'debugindexstats only works with native code'))
1445 raise error.Abort(_(b'debugindexstats only works with native code'))
1445 for k, v in sorted(index.stats().items()):
1446 for k, v in sorted(index.stats().items()):
1446 ui.write(b'%s: %d\n' % (k, v))
1447 ui.write(b'%s: %d\n' % (k, v))
1447
1448
1448
1449
1449 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1450 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1450 def debuginstall(ui, **opts):
1451 def debuginstall(ui, **opts):
1451 '''test Mercurial installation
1452 '''test Mercurial installation
1452
1453
1453 Returns 0 on success.
1454 Returns 0 on success.
1454 '''
1455 '''
1455 opts = pycompat.byteskwargs(opts)
1456 opts = pycompat.byteskwargs(opts)
1456
1457
1457 problems = 0
1458 problems = 0
1458
1459
1459 fm = ui.formatter(b'debuginstall', opts)
1460 fm = ui.formatter(b'debuginstall', opts)
1460 fm.startitem()
1461 fm.startitem()
1461
1462
1462 # encoding
1463 # encoding
1463 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1464 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1464 err = None
1465 err = None
1465 try:
1466 try:
1466 codecs.lookup(pycompat.sysstr(encoding.encoding))
1467 codecs.lookup(pycompat.sysstr(encoding.encoding))
1467 except LookupError as inst:
1468 except LookupError as inst:
1468 err = stringutil.forcebytestr(inst)
1469 err = stringutil.forcebytestr(inst)
1469 problems += 1
1470 problems += 1
1470 fm.condwrite(
1471 fm.condwrite(
1471 err,
1472 err,
1472 b'encodingerror',
1473 b'encodingerror',
1473 _(b" %s\n (check that your locale is properly set)\n"),
1474 _(b" %s\n (check that your locale is properly set)\n"),
1474 err,
1475 err,
1475 )
1476 )
1476
1477
1477 # Python
1478 # Python
1478 pythonlib = None
1479 pythonlib = None
1479 if util.safehasattr(os, '__file__'):
1480 if util.safehasattr(os, '__file__'):
1480 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1481 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1481 elif getattr(sys, 'oxidized', False):
1482 elif getattr(sys, 'oxidized', False):
1482 pythonlib = pycompat.sysexecutable
1483 pythonlib = pycompat.sysexecutable
1483
1484
1484 fm.write(
1485 fm.write(
1485 b'pythonexe',
1486 b'pythonexe',
1486 _(b"checking Python executable (%s)\n"),
1487 _(b"checking Python executable (%s)\n"),
1487 pycompat.sysexecutable or _(b"unknown"),
1488 pycompat.sysexecutable or _(b"unknown"),
1488 )
1489 )
1489 fm.write(
1490 fm.write(
1491 b'pythonimplementation',
1492 _(b"checking Python implementation (%s)\n"),
1493 pycompat.sysbytes(platform.python_implementation()),
1494 )
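# Note on the value reported just above: platform.python_implementation()
# returns the interpreter name ("CPython", "PyPy", "Jython" or "IronPython"),
# so a standard install prints "checking Python implementation (CPython)".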
1495 fm.write(
1490 b'pythonver',
1496 b'pythonver',
1491 _(b"checking Python version (%s)\n"),
1497 _(b"checking Python version (%s)\n"),
1492 (b"%d.%d.%d" % sys.version_info[:3]),
1498 (b"%d.%d.%d" % sys.version_info[:3]),
1493 )
1499 )
1494 fm.write(
1500 fm.write(
1495 b'pythonlib',
1501 b'pythonlib',
1496 _(b"checking Python lib (%s)...\n"),
1502 _(b"checking Python lib (%s)...\n"),
1497 pythonlib or _(b"unknown"),
1503 pythonlib or _(b"unknown"),
1498 )
1504 )
1499
1505
1500 security = set(sslutil.supportedprotocols)
1506 security = set(sslutil.supportedprotocols)
1501 if sslutil.hassni:
1507 if sslutil.hassni:
1502 security.add(b'sni')
1508 security.add(b'sni')
1503
1509
1504 fm.write(
1510 fm.write(
1505 b'pythonsecurity',
1511 b'pythonsecurity',
1506 _(b"checking Python security support (%s)\n"),
1512 _(b"checking Python security support (%s)\n"),
1507 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1513 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1508 )
1514 )
1509
1515
1510 # These are warnings, not errors. So don't increment problem count. This
1516 # These are warnings, not errors. So don't increment problem count. This
1511 # may change in the future.
1517 # may change in the future.
1512 if b'tls1.2' not in security:
1518 if b'tls1.2' not in security:
1513 fm.plain(
1519 fm.plain(
1514 _(
1520 _(
1515 b' TLS 1.2 not supported by Python install; '
1521 b' TLS 1.2 not supported by Python install; '
1516 b'network connections lack modern security\n'
1522 b'network connections lack modern security\n'
1517 )
1523 )
1518 )
1524 )
1519 if b'sni' not in security:
1525 if b'sni' not in security:
1520 fm.plain(
1526 fm.plain(
1521 _(
1527 _(
1522 b' SNI not supported by Python install; may have '
1528 b' SNI not supported by Python install; may have '
1523 b'connectivity issues with some servers\n'
1529 b'connectivity issues with some servers\n'
1524 )
1530 )
1525 )
1531 )
1526
1532
1527 # TODO print CA cert info
1533 # TODO print CA cert info
1528
1534
1529 # hg version
1535 # hg version
1530 hgver = util.version()
1536 hgver = util.version()
1531 fm.write(
1537 fm.write(
1532 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1538 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1533 )
1539 )
1534 fm.write(
1540 fm.write(
1535 b'hgverextra',
1541 b'hgverextra',
1536 _(b"checking Mercurial custom build (%s)\n"),
1542 _(b"checking Mercurial custom build (%s)\n"),
1537 b'+'.join(hgver.split(b'+')[1:]),
1543 b'+'.join(hgver.split(b'+')[1:]),
1538 )
1544 )
1539
1545
1540 # compiled modules
1546 # compiled modules
1541 hgmodules = None
1547 hgmodules = None
1542 if util.safehasattr(sys.modules[__name__], '__file__'):
1548 if util.safehasattr(sys.modules[__name__], '__file__'):
1543 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1549 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1544 elif getattr(sys, 'oxidized', False):
1550 elif getattr(sys, 'oxidized', False):
1545 hgmodules = pycompat.sysexecutable
1551 hgmodules = pycompat.sysexecutable
1546
1552
1547 fm.write(
1553 fm.write(
1548 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1554 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1549 )
1555 )
1550 fm.write(
1556 fm.write(
1551 b'hgmodules',
1557 b'hgmodules',
1552 _(b"checking installed modules (%s)...\n"),
1558 _(b"checking installed modules (%s)...\n"),
1553 hgmodules or _(b"unknown"),
1559 hgmodules or _(b"unknown"),
1554 )
1560 )
1555
1561
1556 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1562 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1557 rustext = rustandc # for now, that's the only case
1563 rustext = rustandc # for now, that's the only case
1558 cext = policy.policy in (b'c', b'allow') or rustandc
1564 cext = policy.policy in (b'c', b'allow') or rustandc
1559 nopure = cext or rustext
1565 nopure = cext or rustext
1560 if nopure:
1566 if nopure:
1561 err = None
1567 err = None
1562 try:
1568 try:
1563 if cext:
1569 if cext:
1564 from .cext import ( # pytype: disable=import-error
1570 from .cext import ( # pytype: disable=import-error
1565 base85,
1571 base85,
1566 bdiff,
1572 bdiff,
1567 mpatch,
1573 mpatch,
1568 osutil,
1574 osutil,
1569 )
1575 )
1570
1576
1571 # quiet pyflakes
1577 # quiet pyflakes
1572 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1578 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1573 if rustext:
1579 if rustext:
1574 from .rustext import ( # pytype: disable=import-error
1580 from .rustext import ( # pytype: disable=import-error
1575 ancestor,
1581 ancestor,
1576 dirstate,
1582 dirstate,
1577 )
1583 )
1578
1584
1579 dir(ancestor), dir(dirstate) # quiet pyflakes
1585 dir(ancestor), dir(dirstate) # quiet pyflakes
1580 except Exception as inst:
1586 except Exception as inst:
1581 err = stringutil.forcebytestr(inst)
1587 err = stringutil.forcebytestr(inst)
1582 problems += 1
1588 problems += 1
1583 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1589 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1584
1590
1585 compengines = util.compengines._engines.values()
1591 compengines = util.compengines._engines.values()
1586 fm.write(
1592 fm.write(
1587 b'compengines',
1593 b'compengines',
1588 _(b'checking registered compression engines (%s)\n'),
1594 _(b'checking registered compression engines (%s)\n'),
1589 fm.formatlist(
1595 fm.formatlist(
1590 sorted(e.name() for e in compengines),
1596 sorted(e.name() for e in compengines),
1591 name=b'compengine',
1597 name=b'compengine',
1592 fmt=b'%s',
1598 fmt=b'%s',
1593 sep=b', ',
1599 sep=b', ',
1594 ),
1600 ),
1595 )
1601 )
1596 fm.write(
1602 fm.write(
1597 b'compenginesavail',
1603 b'compenginesavail',
1598 _(b'checking available compression engines (%s)\n'),
1604 _(b'checking available compression engines (%s)\n'),
1599 fm.formatlist(
1605 fm.formatlist(
1600 sorted(e.name() for e in compengines if e.available()),
1606 sorted(e.name() for e in compengines if e.available()),
1601 name=b'compengine',
1607 name=b'compengine',
1602 fmt=b'%s',
1608 fmt=b'%s',
1603 sep=b', ',
1609 sep=b', ',
1604 ),
1610 ),
1605 )
1611 )
1606 wirecompengines = compression.compengines.supportedwireengines(
1612 wirecompengines = compression.compengines.supportedwireengines(
1607 compression.SERVERROLE
1613 compression.SERVERROLE
1608 )
1614 )
1609 fm.write(
1615 fm.write(
1610 b'compenginesserver',
1616 b'compenginesserver',
1611 _(
1617 _(
1612 b'checking available compression engines '
1618 b'checking available compression engines '
1613 b'for wire protocol (%s)\n'
1619 b'for wire protocol (%s)\n'
1614 ),
1620 ),
1615 fm.formatlist(
1621 fm.formatlist(
1616 [e.name() for e in wirecompengines if e.wireprotosupport()],
1622 [e.name() for e in wirecompengines if e.wireprotosupport()],
1617 name=b'compengine',
1623 name=b'compengine',
1618 fmt=b'%s',
1624 fmt=b'%s',
1619 sep=b', ',
1625 sep=b', ',
1620 ),
1626 ),
1621 )
1627 )
1622 re2 = b'missing'
1628 re2 = b'missing'
1623 if util._re2:
1629 if util._re2:
1624 re2 = b'available'
1630 re2 = b'available'
1625 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1631 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1626 fm.data(re2=bool(util._re2))
1632 fm.data(re2=bool(util._re2))
1627
1633
1628 # templates
1634 # templates
1629 p = templater.templatepaths()
1635 p = templater.templatepaths()
1630 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1636 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1631 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1637 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1632 if p:
1638 if p:
1633 m = templater.templatepath(b"map-cmdline.default")
1639 m = templater.templatepath(b"map-cmdline.default")
1634 if m:
1640 if m:
1635 # template found, check if it is working
1641 # template found, check if it is working
1636 err = None
1642 err = None
1637 try:
1643 try:
1638 templater.templater.frommapfile(m)
1644 templater.templater.frommapfile(m)
1639 except Exception as inst:
1645 except Exception as inst:
1640 err = stringutil.forcebytestr(inst)
1646 err = stringutil.forcebytestr(inst)
1641 p = None
1647 p = None
1642 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1648 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1643 else:
1649 else:
1644 p = None
1650 p = None
1645 fm.condwrite(
1651 fm.condwrite(
1646 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1652 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1647 )
1653 )
1648 fm.condwrite(
1654 fm.condwrite(
1649 not m,
1655 not m,
1650 b'defaulttemplatenotfound',
1656 b'defaulttemplatenotfound',
1651 _(b" template '%s' not found\n"),
1657 _(b" template '%s' not found\n"),
1652 b"default",
1658 b"default",
1653 )
1659 )
1654 if not p:
1660 if not p:
1655 problems += 1
1661 problems += 1
1656 fm.condwrite(
1662 fm.condwrite(
1657 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1663 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1658 )
1664 )
1659
1665
1660 # editor
1666 # editor
1661 editor = ui.geteditor()
1667 editor = ui.geteditor()
1662 editor = util.expandpath(editor)
1668 editor = util.expandpath(editor)
1663 editorbin = procutil.shellsplit(editor)[0]
1669 editorbin = procutil.shellsplit(editor)[0]
1664 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1670 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1665 cmdpath = procutil.findexe(editorbin)
1671 cmdpath = procutil.findexe(editorbin)
1666 fm.condwrite(
1672 fm.condwrite(
1667 not cmdpath and editor == b'vi',
1673 not cmdpath and editor == b'vi',
1668 b'vinotfound',
1674 b'vinotfound',
1669 _(
1675 _(
1670 b" No commit editor set and can't find %s in PATH\n"
1676 b" No commit editor set and can't find %s in PATH\n"
1671 b" (specify a commit editor in your configuration"
1677 b" (specify a commit editor in your configuration"
1672 b" file)\n"
1678 b" file)\n"
1673 ),
1679 ),
1674 not cmdpath and editor == b'vi' and editorbin,
1680 not cmdpath and editor == b'vi' and editorbin,
1675 )
1681 )
1676 fm.condwrite(
1682 fm.condwrite(
1677 not cmdpath and editor != b'vi',
1683 not cmdpath and editor != b'vi',
1678 b'editornotfound',
1684 b'editornotfound',
1679 _(
1685 _(
1680 b" Can't find editor '%s' in PATH\n"
1686 b" Can't find editor '%s' in PATH\n"
1681 b" (specify a commit editor in your configuration"
1687 b" (specify a commit editor in your configuration"
1682 b" file)\n"
1688 b" file)\n"
1683 ),
1689 ),
1684 not cmdpath and editorbin,
1690 not cmdpath and editorbin,
1685 )
1691 )
1686 if not cmdpath and editor != b'vi':
1692 if not cmdpath and editor != b'vi':
1687 problems += 1
1693 problems += 1
1688
1694
1689 # check username
1695 # check username
1690 username = None
1696 username = None
1691 err = None
1697 err = None
1692 try:
1698 try:
1693 username = ui.username()
1699 username = ui.username()
1694 except error.Abort as e:
1700 except error.Abort as e:
1695 err = stringutil.forcebytestr(e)
1701 err = stringutil.forcebytestr(e)
1696 problems += 1
1702 problems += 1
1697
1703
1698 fm.condwrite(
1704 fm.condwrite(
1699 username, b'username', _(b"checking username (%s)\n"), username
1705 username, b'username', _(b"checking username (%s)\n"), username
1700 )
1706 )
1701 fm.condwrite(
1707 fm.condwrite(
1702 err,
1708 err,
1703 b'usernameerror',
1709 b'usernameerror',
1704 _(
1710 _(
1705 b"checking username...\n %s\n"
1711 b"checking username...\n %s\n"
1706 b" (specify a username in your configuration file)\n"
1712 b" (specify a username in your configuration file)\n"
1707 ),
1713 ),
1708 err,
1714 err,
1709 )
1715 )
1710
1716
1711 for name, mod in extensions.extensions():
1717 for name, mod in extensions.extensions():
1712 handler = getattr(mod, 'debuginstall', None)
1718 handler = getattr(mod, 'debuginstall', None)
1713 if handler is not None:
1719 if handler is not None:
1714 problems += handler(ui, fm)
1720 problems += handler(ui, fm)
1715
1721
1716 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1722 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1717 if not problems:
1723 if not problems:
1718 fm.data(problems=problems)
1724 fm.data(problems=problems)
1719 fm.condwrite(
1725 fm.condwrite(
1720 problems,
1726 problems,
1721 b'problems',
1727 b'problems',
1722 _(b"%d problems detected, please check your install!\n"),
1728 _(b"%d problems detected, please check your install!\n"),
1723 problems,
1729 problems,
1724 )
1730 )
1725 fm.end()
1731 fm.end()
1726
1732
1727 return problems
1733 return problems
1728
1734
1729
1735
1730 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1736 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1731 def debugknown(ui, repopath, *ids, **opts):
1737 def debugknown(ui, repopath, *ids, **opts):
1732 """test whether node ids are known to a repo
1738 """test whether node ids are known to a repo
1733
1739
1734 Every ID must be a full-length hex node id string. Returns a list of 0s
1740 Every ID must be a full-length hex node id string. Returns a list of 0s
1735 and 1s indicating unknown/known.
1741 and 1s indicating unknown/known.
1736 """
1742 """
1737 opts = pycompat.byteskwargs(opts)
1743 opts = pycompat.byteskwargs(opts)
1738 repo = hg.peer(ui, opts, repopath)
1744 repo = hg.peer(ui, opts, repopath)
1739 if not repo.capable(b'known'):
1745 if not repo.capable(b'known'):
1740 raise error.Abort(b"known() not supported by target repository")
1746 raise error.Abort(b"known() not supported by target repository")
1741 flags = repo.known([bin(s) for s in ids])
1747 flags = repo.known([bin(s) for s in ids])
1742 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1748 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1743
1749
1744
1750
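# Illustrative reading of the output above: if the remote answers
# flags = [True, False, True] for three queried IDs, the join produces b"101",
# i.e. the first and third nodes are known to the remote and the second is not.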
1745 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1751 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1746 def debuglabelcomplete(ui, repo, *args):
1752 def debuglabelcomplete(ui, repo, *args):
1747 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1753 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1748 debugnamecomplete(ui, repo, *args)
1754 debugnamecomplete(ui, repo, *args)
1749
1755
1750
1756
1751 @command(
1757 @command(
1752 b'debuglocks',
1758 b'debuglocks',
1753 [
1759 [
1754 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1760 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1755 (
1761 (
1756 b'W',
1762 b'W',
1757 b'force-wlock',
1763 b'force-wlock',
1758 None,
1764 None,
1759 _(b'free the working state lock (DANGEROUS)'),
1765 _(b'free the working state lock (DANGEROUS)'),
1760 ),
1766 ),
1761 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1767 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1762 (
1768 (
1763 b'S',
1769 b'S',
1764 b'set-wlock',
1770 b'set-wlock',
1765 None,
1771 None,
1766 _(b'set the working state lock until stopped'),
1772 _(b'set the working state lock until stopped'),
1767 ),
1773 ),
1768 ],
1774 ],
1769 _(b'[OPTION]...'),
1775 _(b'[OPTION]...'),
1770 )
1776 )
1771 def debuglocks(ui, repo, **opts):
1777 def debuglocks(ui, repo, **opts):
1772 """show or modify state of locks
1778 """show or modify state of locks
1773
1779
1774 By default, this command will show which locks are held. This
1780 By default, this command will show which locks are held. This
1775 includes the user and process holding the lock, the amount of time
1781 includes the user and process holding the lock, the amount of time
1776 the lock has been held, and the machine name where the process is
1782 the lock has been held, and the machine name where the process is
1777 running if it's not local.
1783 running if it's not local.
1778
1784
1779 Locks protect the integrity of Mercurial's data, so should be
1785 Locks protect the integrity of Mercurial's data, so should be
1780 treated with care. System crashes or other interruptions may cause
1786 treated with care. System crashes or other interruptions may cause
1781 locks to not be properly released, though Mercurial will usually
1787 locks to not be properly released, though Mercurial will usually
1782 detect and remove such stale locks automatically.
1788 detect and remove such stale locks automatically.
1783
1789
1784 However, detecting stale locks may not always be possible (for
1790 However, detecting stale locks may not always be possible (for
1785 instance, on a shared filesystem). Removing locks may also be
1791 instance, on a shared filesystem). Removing locks may also be
1786 blocked by filesystem permissions.
1792 blocked by filesystem permissions.
1787
1793
1788 Setting a lock will prevent other commands from changing the data.
1794 Setting a lock will prevent other commands from changing the data.
1789 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1795 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1790 The set locks are removed when the command exits.
1796 The set locks are removed when the command exits.
1791
1797
1792 Returns 0 if no locks are held.
1798 Returns 0 if no locks are held.
1793
1799
1794 """
1800 """
1795
1801
1796 if opts.get('force_lock'):
1802 if opts.get('force_lock'):
1797 repo.svfs.unlink(b'lock')
1803 repo.svfs.unlink(b'lock')
1798 if opts.get('force_wlock'):
1804 if opts.get('force_wlock'):
1799 repo.vfs.unlink(b'wlock')
1805 repo.vfs.unlink(b'wlock')
1800 if opts.get('force_lock') or opts.get('force_wlock'):
1806 if opts.get('force_lock') or opts.get('force_wlock'):
1801 return 0
1807 return 0
1802
1808
1803 locks = []
1809 locks = []
1804 try:
1810 try:
1805 if opts.get('set_wlock'):
1811 if opts.get('set_wlock'):
1806 try:
1812 try:
1807 locks.append(repo.wlock(False))
1813 locks.append(repo.wlock(False))
1808 except error.LockHeld:
1814 except error.LockHeld:
1809 raise error.Abort(_(b'wlock is already held'))
1815 raise error.Abort(_(b'wlock is already held'))
1810 if opts.get('set_lock'):
1816 if opts.get('set_lock'):
1811 try:
1817 try:
1812 locks.append(repo.lock(False))
1818 locks.append(repo.lock(False))
1813 except error.LockHeld:
1819 except error.LockHeld:
1814 raise error.Abort(_(b'lock is already held'))
1820 raise error.Abort(_(b'lock is already held'))
1815 if len(locks):
1821 if len(locks):
1816 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1822 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1817 return 0
1823 return 0
1818 finally:
1824 finally:
1819 release(*locks)
1825 release(*locks)
1820
1826
1821 now = time.time()
1827 now = time.time()
1822 held = 0
1828 held = 0
1823
1829
1824 def report(vfs, name, method):
1830 def report(vfs, name, method):
1825 # this causes stale locks to get reaped for more accurate reporting
1831 # this causes stale locks to get reaped for more accurate reporting
1826 try:
1832 try:
1827 l = method(False)
1833 l = method(False)
1828 except error.LockHeld:
1834 except error.LockHeld:
1829 l = None
1835 l = None
1830
1836
1831 if l:
1837 if l:
1832 l.release()
1838 l.release()
1833 else:
1839 else:
1834 try:
1840 try:
1835 st = vfs.lstat(name)
1841 st = vfs.lstat(name)
1836 age = now - st[stat.ST_MTIME]
1842 age = now - st[stat.ST_MTIME]
1837 user = util.username(st.st_uid)
1843 user = util.username(st.st_uid)
1838 locker = vfs.readlock(name)
1844 locker = vfs.readlock(name)
1839 if b":" in locker:
1845 if b":" in locker:
1840 host, pid = locker.split(b':')
1846 host, pid = locker.split(b':')
1841 if host == socket.gethostname():
1847 if host == socket.gethostname():
1842 locker = b'user %s, process %s' % (user or b'None', pid)
1848 locker = b'user %s, process %s' % (user or b'None', pid)
1843 else:
1849 else:
1844 locker = b'user %s, process %s, host %s' % (
1850 locker = b'user %s, process %s, host %s' % (
1845 user or b'None',
1851 user or b'None',
1846 pid,
1852 pid,
1847 host,
1853 host,
1848 )
1854 )
1849 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1855 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1850 return 1
1856 return 1
1851 except OSError as e:
1857 except OSError as e:
1852 if e.errno != errno.ENOENT:
1858 if e.errno != errno.ENOENT:
1853 raise
1859 raise
1854
1860
1855 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1861 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1856 return 0
1862 return 0
1857
1863
1858 held += report(repo.svfs, b"lock", repo.lock)
1864 held += report(repo.svfs, b"lock", repo.lock)
1859 held += report(repo.vfs, b"wlock", repo.wlock)
1865 held += report(repo.vfs, b"wlock", repo.wlock)
1860
1866
1861 return held
1867 return held
1862
1868
1863
1869
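# The report() helper above prints one line per lock; a hypothetical run where
# only the working state lock is held would show something like:
#   lock:  free
#   wlock: user alice, process 12345 (7s)
# (a "host" part is appended when the lock is held from another machine).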
1864 @command(
1870 @command(
1865 b'debugmanifestfulltextcache',
1871 b'debugmanifestfulltextcache',
1866 [
1872 [
1867 (b'', b'clear', False, _(b'clear the cache')),
1873 (b'', b'clear', False, _(b'clear the cache')),
1868 (
1874 (
1869 b'a',
1875 b'a',
1870 b'add',
1876 b'add',
1871 [],
1877 [],
1872 _(b'add the given manifest nodes to the cache'),
1878 _(b'add the given manifest nodes to the cache'),
1873 _(b'NODE'),
1879 _(b'NODE'),
1874 ),
1880 ),
1875 ],
1881 ],
1876 b'',
1882 b'',
1877 )
1883 )
1878 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1884 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1879 """show, clear or amend the contents of the manifest fulltext cache"""
1885 """show, clear or amend the contents of the manifest fulltext cache"""
1880
1886
1881 def getcache():
1887 def getcache():
1882 r = repo.manifestlog.getstorage(b'')
1888 r = repo.manifestlog.getstorage(b'')
1883 try:
1889 try:
1884 return r._fulltextcache
1890 return r._fulltextcache
1885 except AttributeError:
1891 except AttributeError:
1886 msg = _(
1892 msg = _(
1887 b"Current revlog implementation doesn't appear to have a "
1893 b"Current revlog implementation doesn't appear to have a "
1888 b"manifest fulltext cache\n"
1894 b"manifest fulltext cache\n"
1889 )
1895 )
1890 raise error.Abort(msg)
1896 raise error.Abort(msg)
1891
1897
1892 if opts.get('clear'):
1898 if opts.get('clear'):
1893 with repo.wlock():
1899 with repo.wlock():
1894 cache = getcache()
1900 cache = getcache()
1895 cache.clear(clear_persisted_data=True)
1901 cache.clear(clear_persisted_data=True)
1896 return
1902 return
1897
1903
1898 if add:
1904 if add:
1899 with repo.wlock():
1905 with repo.wlock():
1900 m = repo.manifestlog
1906 m = repo.manifestlog
1901 store = m.getstorage(b'')
1907 store = m.getstorage(b'')
1902 for n in add:
1908 for n in add:
1903 try:
1909 try:
1904 manifest = m[store.lookup(n)]
1910 manifest = m[store.lookup(n)]
1905 except error.LookupError as e:
1911 except error.LookupError as e:
1906 raise error.Abort(e, hint=b"Check your manifest node id")
1912 raise error.Abort(e, hint=b"Check your manifest node id")
1907 manifest.read() # stores revision in cache too
1913 manifest.read() # stores revision in cache too
1908 return
1914 return
1909
1915
1910 cache = getcache()
1916 cache = getcache()
1911 if not len(cache):
1917 if not len(cache):
1912 ui.write(_(b'cache empty\n'))
1918 ui.write(_(b'cache empty\n'))
1913 else:
1919 else:
1914 ui.write(
1920 ui.write(
1915 _(
1921 _(
1916 b'cache contains %d manifest entries, in order of most to '
1922 b'cache contains %d manifest entries, in order of most to '
1917 b'least recent:\n'
1923 b'least recent:\n'
1918 )
1924 )
1919 % (len(cache),)
1925 % (len(cache),)
1920 )
1926 )
1921 totalsize = 0
1927 totalsize = 0
1922 for nodeid in cache:
1928 for nodeid in cache:
1923 # Use cache.peek() so as not to update the LRU order
1929 # Use cache.peek() so as not to update the LRU order
1924 data = cache.peek(nodeid)
1930 data = cache.peek(nodeid)
1925 size = len(data)
1931 size = len(data)
1926 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1932 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1927 ui.write(
1933 ui.write(
1928 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1934 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1929 )
1935 )
1930 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1936 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1931 ui.write(
1937 ui.write(
1932 _(b'total cache data size %s, on-disk %s\n')
1938 _(b'total cache data size %s, on-disk %s\n')
1933 % (util.bytecount(totalsize), util.bytecount(ondisk))
1939 % (util.bytecount(totalsize), util.bytecount(ondisk))
1934 )
1940 )
1935
1941
1936
1942
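# Quick check of the size accounting above, assuming a hypothetical cache with
# three 1 MiB manifests: totalsize = 3 * (1048576 + 24) bytes, which
# util.bytecount renders as roughly "3.00 MB".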
1937 @command(b'debugmergestate', [], b'')
1943 @command(b'debugmergestate', [], b'')
1938 def debugmergestate(ui, repo, *args):
1944 def debugmergestate(ui, repo, *args):
1939 """print merge state
1945 """print merge state
1940
1946
1941 Use --verbose to print out information about whether v1 or v2 merge state
1947 Use --verbose to print out information about whether v1 or v2 merge state
1942 was chosen."""
1948 was chosen."""
1943
1949
1944 def _hashornull(h):
1950 def _hashornull(h):
1945 if h == nullhex:
1951 if h == nullhex:
1946 return b'null'
1952 return b'null'
1947 else:
1953 else:
1948 return h
1954 return h
1949
1955
1950 def printrecords(version):
1956 def printrecords(version):
1951 ui.writenoi18n(b'* version %d records\n' % version)
1957 ui.writenoi18n(b'* version %d records\n' % version)
1952 if version == 1:
1958 if version == 1:
1953 records = v1records
1959 records = v1records
1954 else:
1960 else:
1955 records = v2records
1961 records = v2records
1956
1962
1957 for rtype, record in records:
1963 for rtype, record in records:
1958 # pretty print some record types
1964 # pretty print some record types
1959 if rtype == b'L':
1965 if rtype == b'L':
1960 ui.writenoi18n(b'local: %s\n' % record)
1966 ui.writenoi18n(b'local: %s\n' % record)
1961 elif rtype == b'O':
1967 elif rtype == b'O':
1962 ui.writenoi18n(b'other: %s\n' % record)
1968 ui.writenoi18n(b'other: %s\n' % record)
1963 elif rtype == b'm':
1969 elif rtype == b'm':
1964 driver, mdstate = record.split(b'\0', 1)
1970 driver, mdstate = record.split(b'\0', 1)
1965 ui.writenoi18n(
1971 ui.writenoi18n(
1966 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1972 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1967 )
1973 )
1968 elif rtype in b'FDC':
1974 elif rtype in b'FDC':
1969 r = record.split(b'\0')
1975 r = record.split(b'\0')
1970 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1976 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1971 if version == 1:
1977 if version == 1:
1972 onode = b'not stored in v1 format'
1978 onode = b'not stored in v1 format'
1973 flags = r[7]
1979 flags = r[7]
1974 else:
1980 else:
1975 onode, flags = r[7:9]
1981 onode, flags = r[7:9]
1976 ui.writenoi18n(
1982 ui.writenoi18n(
1977 b'file: %s (record type "%s", state "%s", hash %s)\n'
1983 b'file: %s (record type "%s", state "%s", hash %s)\n'
1978 % (f, rtype, state, _hashornull(hash))
1984 % (f, rtype, state, _hashornull(hash))
1979 )
1985 )
1980 ui.writenoi18n(
1986 ui.writenoi18n(
1981 b' local path: %s (flags "%s")\n' % (lfile, flags)
1987 b' local path: %s (flags "%s")\n' % (lfile, flags)
1982 )
1988 )
1983 ui.writenoi18n(
1989 ui.writenoi18n(
1984 b' ancestor path: %s (node %s)\n'
1990 b' ancestor path: %s (node %s)\n'
1985 % (afile, _hashornull(anode))
1991 % (afile, _hashornull(anode))
1986 )
1992 )
1987 ui.writenoi18n(
1993 ui.writenoi18n(
1988 b' other path: %s (node %s)\n'
1994 b' other path: %s (node %s)\n'
1989 % (ofile, _hashornull(onode))
1995 % (ofile, _hashornull(onode))
1990 )
1996 )
1991 elif rtype == b'f':
1997 elif rtype == b'f':
1992 filename, rawextras = record.split(b'\0', 1)
1998 filename, rawextras = record.split(b'\0', 1)
1993 extras = rawextras.split(b'\0')
1999 extras = rawextras.split(b'\0')
1994 i = 0
2000 i = 0
1995 extrastrings = []
2001 extrastrings = []
1996 while i < len(extras):
2002 while i < len(extras):
1997 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2003 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1998 i += 2
2004 i += 2
1999
2005
2000 ui.writenoi18n(
2006 ui.writenoi18n(
2001 b'file extras: %s (%s)\n'
2007 b'file extras: %s (%s)\n'
2002 % (filename, b', '.join(extrastrings))
2008 % (filename, b', '.join(extrastrings))
2003 )
2009 )
2004 elif rtype == b'l':
2010 elif rtype == b'l':
2005 labels = record.split(b'\0', 2)
2011 labels = record.split(b'\0', 2)
2006 labels = [l for l in labels if len(l) > 0]
2012 labels = [l for l in labels if len(l) > 0]
2007 ui.writenoi18n(b'labels:\n')
2013 ui.writenoi18n(b'labels:\n')
2008 ui.write((b' local: %s\n' % labels[0]))
2014 ui.write((b' local: %s\n' % labels[0]))
2009 ui.write((b' other: %s\n' % labels[1]))
2015 ui.write((b' other: %s\n' % labels[1]))
2010 if len(labels) > 2:
2016 if len(labels) > 2:
2011 ui.write((b' base: %s\n' % labels[2]))
2017 ui.write((b' base: %s\n' % labels[2]))
2012 else:
2018 else:
2013 ui.writenoi18n(
2019 ui.writenoi18n(
2014 b'unrecognized entry: %s\t%s\n'
2020 b'unrecognized entry: %s\t%s\n'
2015 % (rtype, record.replace(b'\0', b'\t'))
2021 % (rtype, record.replace(b'\0', b'\t'))
2016 )
2022 )
2017
2023
2018 # Avoid mergestate.read() since it may raise an exception for unsupported
2024 # Avoid mergestate.read() since it may raise an exception for unsupported
2019 # merge state records. We shouldn't be doing this, but this is OK since this
2025 # merge state records. We shouldn't be doing this, but this is OK since this
2020 # command is pretty low-level.
2026 # command is pretty low-level.
2021 ms = mergemod.mergestate(repo)
2027 ms = mergemod.mergestate(repo)
2022
2028
2023 # sort so that reasonable information is on top
2029 # sort so that reasonable information is on top
2024 v1records = ms._readrecordsv1()
2030 v1records = ms._readrecordsv1()
2025 v2records = ms._readrecordsv2()
2031 v2records = ms._readrecordsv2()
2026 order = b'LOml'
2032 order = b'LOml'
2027
2033
2028 def key(r):
2034 def key(r):
2029 idx = order.find(r[0])
2035 idx = order.find(r[0])
2030 if idx == -1:
2036 if idx == -1:
2031 return (1, r[1])
2037 return (1, r[1])
2032 else:
2038 else:
2033 return (0, idx)
2039 return (0, idx)
2034
2040
2035 v1records.sort(key=key)
2041 v1records.sort(key=key)
2036 v2records.sort(key=key)
2042 v2records.sort(key=key)
2037
2043
2038 if not v1records and not v2records:
2044 if not v1records and not v2records:
2039 ui.writenoi18n(b'no merge state found\n')
2045 ui.writenoi18n(b'no merge state found\n')
2040 elif not v2records:
2046 elif not v2records:
2041 ui.notenoi18n(b'no version 2 merge state\n')
2047 ui.notenoi18n(b'no version 2 merge state\n')
2042 printrecords(1)
2048 printrecords(1)
2043 elif ms._v1v2match(v1records, v2records):
2049 elif ms._v1v2match(v1records, v2records):
2044 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2050 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2045 printrecords(2)
2051 printrecords(2)
2046 else:
2052 else:
2047 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2053 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2048 printrecords(1)
2054 printrecords(1)
2049 if ui.verbose:
2055 if ui.verbose:
2050 printrecords(2)
2056 printrecords(2)
2051
2057
2052
2058
2053 @command(b'debugnamecomplete', [], _(b'NAME...'))
2059 @command(b'debugnamecomplete', [], _(b'NAME...'))
2054 def debugnamecomplete(ui, repo, *args):
2060 def debugnamecomplete(ui, repo, *args):
2055 '''complete "names" - tags, open branch names, bookmark names'''
2061 '''complete "names" - tags, open branch names, bookmark names'''
2056
2062
2057 names = set()
2063 names = set()
2058 # since we previously only listed open branches, we will handle that
2064 # since we previously only listed open branches, we will handle that
2059 # specially (after this for loop)
2065 # specially (after this for loop)
2060 for name, ns in pycompat.iteritems(repo.names):
2066 for name, ns in pycompat.iteritems(repo.names):
2061 if name != b'branches':
2067 if name != b'branches':
2062 names.update(ns.listnames(repo))
2068 names.update(ns.listnames(repo))
2063 names.update(
2069 names.update(
2064 tag
2070 tag
2065 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2071 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2066 if not closed
2072 if not closed
2067 )
2073 )
2068 completions = set()
2074 completions = set()
2069 if not args:
2075 if not args:
2070 args = [b'']
2076 args = [b'']
2071 for a in args:
2077 for a in args:
2072 completions.update(n for n in names if n.startswith(a))
2078 completions.update(n for n in names if n.startswith(a))
2073 ui.write(b'\n'.join(sorted(completions)))
2079 ui.write(b'\n'.join(sorted(completions)))
2074 ui.write(b'\n')
2080 ui.write(b'\n')
2075
2081
2076
2082
2077 @command(
2083 @command(
2078 b'debugobsolete',
2084 b'debugobsolete',
2079 [
2085 [
2080 (b'', b'flags', 0, _(b'markers flag')),
2086 (b'', b'flags', 0, _(b'markers flag')),
2081 (
2087 (
2082 b'',
2088 b'',
2083 b'record-parents',
2089 b'record-parents',
2084 False,
2090 False,
2085 _(b'record parent information for the precursor'),
2091 _(b'record parent information for the precursor'),
2086 ),
2092 ),
2087 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2093 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2088 (
2094 (
2089 b'',
2095 b'',
2090 b'exclusive',
2096 b'exclusive',
2091 False,
2097 False,
2092 _(b'restrict display to markers only relevant to REV'),
2098 _(b'restrict display to markers only relevant to REV'),
2093 ),
2099 ),
2094 (b'', b'index', False, _(b'display index of the marker')),
2100 (b'', b'index', False, _(b'display index of the marker')),
2095 (b'', b'delete', [], _(b'delete markers specified by indices')),
2101 (b'', b'delete', [], _(b'delete markers specified by indices')),
2096 ]
2102 ]
2097 + cmdutil.commitopts2
2103 + cmdutil.commitopts2
2098 + cmdutil.formatteropts,
2104 + cmdutil.formatteropts,
2099 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2105 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2100 )
2106 )
2101 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2107 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2102 """create arbitrary obsolete marker
2108 """create arbitrary obsolete marker
2103
2109
2104 With no arguments, displays the list of obsolescence markers."""
2110 With no arguments, displays the list of obsolescence markers."""
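    # Hedged usage sketch; OLDNODE/NEWNODE are placeholders, not real
    # changesets, and must be full 40-character hexadecimal identifiers as
    # enforced by parsenodeid() below:
    #
    #   $ hg debugobsolete                    # list existing markers
    #   $ hg debugobsolete OLDNODE NEWNODE    # mark OLDNODE as replaced by NEWNODE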
2105
2111
2106 opts = pycompat.byteskwargs(opts)
2112 opts = pycompat.byteskwargs(opts)
2107
2113
2108 def parsenodeid(s):
2114 def parsenodeid(s):
2109 try:
2115 try:
2110 # We do not use revsingle/revrange functions here to accept
2116 # We do not use revsingle/revrange functions here to accept
2111 # arbitrary node identifiers, possibly not present in the
2117 # arbitrary node identifiers, possibly not present in the
2112 # local repository.
2118 # local repository.
2113 n = bin(s)
2119 n = bin(s)
2114 if len(n) != len(nullid):
2120 if len(n) != len(nullid):
2115 raise TypeError()
2121 raise TypeError()
2116 return n
2122 return n
2117 except TypeError:
2123 except TypeError:
2118 raise error.Abort(
2124 raise error.Abort(
2119 b'changeset references must be full hexadecimal '
2125 b'changeset references must be full hexadecimal '
2120 b'node identifiers'
2126 b'node identifiers'
2121 )
2127 )
2122
2128
2123 if opts.get(b'delete'):
2129 if opts.get(b'delete'):
2124 indices = []
2130 indices = []
2125 for v in opts.get(b'delete'):
2131 for v in opts.get(b'delete'):
2126 try:
2132 try:
2127 indices.append(int(v))
2133 indices.append(int(v))
2128 except ValueError:
2134 except ValueError:
2129 raise error.Abort(
2135 raise error.Abort(
2130 _(b'invalid index value: %r') % v,
2136 _(b'invalid index value: %r') % v,
2131 hint=_(b'use integers for indices'),
2137 hint=_(b'use integers for indices'),
2132 )
2138 )
2133
2139
2134 if repo.currenttransaction():
2140 if repo.currenttransaction():
2135 raise error.Abort(
2141 raise error.Abort(
2136 _(b'cannot delete obsmarkers in the middle of a transaction.')
2142 _(b'cannot delete obsmarkers in the middle of a transaction.')
2137 )
2143 )
2138
2144
2139 with repo.lock():
2145 with repo.lock():
2140 n = repair.deleteobsmarkers(repo.obsstore, indices)
2146 n = repair.deleteobsmarkers(repo.obsstore, indices)
2141 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2147 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2142
2148
2143 return
2149 return
2144
2150
2145 if precursor is not None:
2151 if precursor is not None:
2146 if opts[b'rev']:
2152 if opts[b'rev']:
2147 raise error.Abort(b'cannot select revision when creating marker')
2153 raise error.Abort(b'cannot select revision when creating marker')
2148 metadata = {}
2154 metadata = {}
2149 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2155 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2150 succs = tuple(parsenodeid(succ) for succ in successors)
2156 succs = tuple(parsenodeid(succ) for succ in successors)
2151 l = repo.lock()
2157 l = repo.lock()
2152 try:
2158 try:
2153 tr = repo.transaction(b'debugobsolete')
2159 tr = repo.transaction(b'debugobsolete')
2154 try:
2160 try:
2155 date = opts.get(b'date')
2161 date = opts.get(b'date')
2156 if date:
2162 if date:
2157 date = dateutil.parsedate(date)
2163 date = dateutil.parsedate(date)
2158 else:
2164 else:
2159 date = None
2165 date = None
2160 prec = parsenodeid(precursor)
2166 prec = parsenodeid(precursor)
2161 parents = None
2167 parents = None
2162 if opts[b'record_parents']:
2168 if opts[b'record_parents']:
2163 if prec not in repo.unfiltered():
2169 if prec not in repo.unfiltered():
2164 raise error.Abort(
2170 raise error.Abort(
2165 b'cannot use --record-parents on '
2171 b'cannot use --record-parents on '
2166 b'unknown changesets'
2172 b'unknown changesets'
2167 )
2173 )
2168 parents = repo.unfiltered()[prec].parents()
2174 parents = repo.unfiltered()[prec].parents()
2169 parents = tuple(p.node() for p in parents)
2175 parents = tuple(p.node() for p in parents)
2170 repo.obsstore.create(
2176 repo.obsstore.create(
2171 tr,
2177 tr,
2172 prec,
2178 prec,
2173 succs,
2179 succs,
2174 opts[b'flags'],
2180 opts[b'flags'],
2175 parents=parents,
2181 parents=parents,
2176 date=date,
2182 date=date,
2177 metadata=metadata,
2183 metadata=metadata,
2178 ui=ui,
2184 ui=ui,
2179 )
2185 )
2180 tr.close()
2186 tr.close()
2181 except ValueError as exc:
2187 except ValueError as exc:
2182 raise error.Abort(
2188 raise error.Abort(
2183 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2189 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2184 )
2190 )
2185 finally:
2191 finally:
2186 tr.release()
2192 tr.release()
2187 finally:
2193 finally:
2188 l.release()
2194 l.release()
2189 else:
2195 else:
2190 if opts[b'rev']:
2196 if opts[b'rev']:
2191 revs = scmutil.revrange(repo, opts[b'rev'])
2197 revs = scmutil.revrange(repo, opts[b'rev'])
2192 nodes = [repo[r].node() for r in revs]
2198 nodes = [repo[r].node() for r in revs]
2193 markers = list(
2199 markers = list(
2194 obsutil.getmarkers(
2200 obsutil.getmarkers(
2195 repo, nodes=nodes, exclusive=opts[b'exclusive']
2201 repo, nodes=nodes, exclusive=opts[b'exclusive']
2196 )
2202 )
2197 )
2203 )
2198 markers.sort(key=lambda x: x._data)
2204 markers.sort(key=lambda x: x._data)
2199 else:
2205 else:
2200 markers = obsutil.getmarkers(repo)
2206 markers = obsutil.getmarkers(repo)
2201
2207
2202 markerstoiter = markers
2208 markerstoiter = markers
2203 isrelevant = lambda m: True
2209 isrelevant = lambda m: True
2204 if opts.get(b'rev') and opts.get(b'index'):
2210 if opts.get(b'rev') and opts.get(b'index'):
2205 markerstoiter = obsutil.getmarkers(repo)
2211 markerstoiter = obsutil.getmarkers(repo)
2206 markerset = set(markers)
2212 markerset = set(markers)
2207 isrelevant = lambda m: m in markerset
2213 isrelevant = lambda m: m in markerset
2208
2214
2209 fm = ui.formatter(b'debugobsolete', opts)
2215 fm = ui.formatter(b'debugobsolete', opts)
2210 for i, m in enumerate(markerstoiter):
2216 for i, m in enumerate(markerstoiter):
2211 if not isrelevant(m):
2217 if not isrelevant(m):
2212 # marker can be irrelevant when we're iterating over a set
2218 # marker can be irrelevant when we're iterating over a set
2213 # of markers (markerstoiter) which is bigger than the set
2219 # of markers (markerstoiter) which is bigger than the set
2214 # of markers we want to display (markers)
2220 # of markers we want to display (markers)
2215 # this can happen if both --index and --rev options are
2221 # this can happen if both --index and --rev options are
2216 # provided and thus we need to iterate over all of the markers
2222 # provided and thus we need to iterate over all of the markers
2217 # to get the correct indices, but only display the ones that
2223 # to get the correct indices, but only display the ones that
2218 # are relevant to the --rev value
2224 # are relevant to the --rev value
2219 continue
2225 continue
2220 fm.startitem()
2226 fm.startitem()
2221 ind = i if opts.get(b'index') else None
2227 ind = i if opts.get(b'index') else None
2222 cmdutil.showmarker(fm, m, index=ind)
2228 cmdutil.showmarker(fm, m, index=ind)
2223 fm.end()
2229 fm.end()
2224
2230
2225
2231
2226 @command(
2232 @command(
2227 b'debugp1copies',
2233 b'debugp1copies',
2228 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2234 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2229 _(b'[-r REV]'),
2235 _(b'[-r REV]'),
2230 )
2236 )
2231 def debugp1copies(ui, repo, **opts):
2237 def debugp1copies(ui, repo, **opts):
2232 """dump copy information compared to p1"""
2238 """dump copy information compared to p1"""
2233
2239
2234 opts = pycompat.byteskwargs(opts)
2240 opts = pycompat.byteskwargs(opts)
2235 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2241 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2236 for dst, src in ctx.p1copies().items():
2242 for dst, src in ctx.p1copies().items():
2237 ui.write(b'%s -> %s\n' % (src, dst))
2243 ui.write(b'%s -> %s\n' % (src, dst))
2238
2244
2239
2245
2240 @command(
2246 @command(
2241 b'debugp2copies',
2247 b'debugp2copies',
2242 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2248 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2243 _(b'[-r REV]'),
2249 _(b'[-r REV]'),
2244 )
2250 )
2245 def debugp2copies(ui, repo, **opts):
2251 def debugp2copies(ui, repo, **opts):
2246 """dump copy information compared to p2"""
2252 """dump copy information compared to p2"""
2247
2253
2248 opts = pycompat.byteskwargs(opts)
2254 opts = pycompat.byteskwargs(opts)
2249 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2255 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2250 for dst, src in ctx.p2copies().items():
2256 for dst, src in ctx.p2copies().items():
2251 ui.write(b'%s -> %s\n' % (src, dst))
2257 ui.write(b'%s -> %s\n' % (src, dst))
2252
2258
2253
2259
2254 @command(
2260 @command(
2255 b'debugpathcomplete',
2261 b'debugpathcomplete',
2256 [
2262 [
2257 (b'f', b'full', None, _(b'complete an entire path')),
2263 (b'f', b'full', None, _(b'complete an entire path')),
2258 (b'n', b'normal', None, _(b'show only normal files')),
2264 (b'n', b'normal', None, _(b'show only normal files')),
2259 (b'a', b'added', None, _(b'show only added files')),
2265 (b'a', b'added', None, _(b'show only added files')),
2260 (b'r', b'removed', None, _(b'show only removed files')),
2266 (b'r', b'removed', None, _(b'show only removed files')),
2261 ],
2267 ],
2262 _(b'FILESPEC...'),
2268 _(b'FILESPEC...'),
2263 )
2269 )
2264 def debugpathcomplete(ui, repo, *specs, **opts):
2270 def debugpathcomplete(ui, repo, *specs, **opts):
2265 '''complete part or all of a tracked path
2271 '''complete part or all of a tracked path
2266
2272
2267 This command supports shells that offer path name completion. It
2273 This command supports shells that offer path name completion. It
2268 currently completes only files already known to the dirstate.
2274 currently completes only files already known to the dirstate.
2269
2275
2270 Completion extends only to the next path segment unless
2276 Completion extends only to the next path segment unless
2271 --full is specified, in which case entire paths are used.'''
2277 --full is specified, in which case entire paths are used.'''
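    # Hedged example (file names are hypothetical): with tracked files
    # "src/main.c" and "src/util.c", completing the prefix "s" stops at the
    # next path segment unless --full is given:
    #
    #   $ hg debugpathcomplete s
    #   src
    #   $ hg debugpathcomplete --full s
    #   src/main.c
    #   src/util.c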
2272
2278
2273 def complete(path, acceptable):
2279 def complete(path, acceptable):
2274 dirstate = repo.dirstate
2280 dirstate = repo.dirstate
2275 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2281 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2276 rootdir = repo.root + pycompat.ossep
2282 rootdir = repo.root + pycompat.ossep
2277 if spec != repo.root and not spec.startswith(rootdir):
2283 if spec != repo.root and not spec.startswith(rootdir):
2278 return [], []
2284 return [], []
2279 if os.path.isdir(spec):
2285 if os.path.isdir(spec):
2280 spec += b'/'
2286 spec += b'/'
2281 spec = spec[len(rootdir) :]
2287 spec = spec[len(rootdir) :]
2282 fixpaths = pycompat.ossep != b'/'
2288 fixpaths = pycompat.ossep != b'/'
2283 if fixpaths:
2289 if fixpaths:
2284 spec = spec.replace(pycompat.ossep, b'/')
2290 spec = spec.replace(pycompat.ossep, b'/')
2285 speclen = len(spec)
2291 speclen = len(spec)
2286 fullpaths = opts['full']
2292 fullpaths = opts['full']
2287 files, dirs = set(), set()
2293 files, dirs = set(), set()
2288 adddir, addfile = dirs.add, files.add
2294 adddir, addfile = dirs.add, files.add
2289 for f, st in pycompat.iteritems(dirstate):
2295 for f, st in pycompat.iteritems(dirstate):
2290 if f.startswith(spec) and st[0] in acceptable:
2296 if f.startswith(spec) and st[0] in acceptable:
2291 if fixpaths:
2297 if fixpaths:
2292 f = f.replace(b'/', pycompat.ossep)
2298 f = f.replace(b'/', pycompat.ossep)
2293 if fullpaths:
2299 if fullpaths:
2294 addfile(f)
2300 addfile(f)
2295 continue
2301 continue
2296 s = f.find(pycompat.ossep, speclen)
2302 s = f.find(pycompat.ossep, speclen)
2297 if s >= 0:
2303 if s >= 0:
2298 adddir(f[:s])
2304 adddir(f[:s])
2299 else:
2305 else:
2300 addfile(f)
2306 addfile(f)
2301 return files, dirs
2307 return files, dirs
2302
2308
2303 acceptable = b''
2309 acceptable = b''
2304 if opts['normal']:
2310 if opts['normal']:
2305 acceptable += b'nm'
2311 acceptable += b'nm'
2306 if opts['added']:
2312 if opts['added']:
2307 acceptable += b'a'
2313 acceptable += b'a'
2308 if opts['removed']:
2314 if opts['removed']:
2309 acceptable += b'r'
2315 acceptable += b'r'
2310 cwd = repo.getcwd()
2316 cwd = repo.getcwd()
2311 if not specs:
2317 if not specs:
2312 specs = [b'.']
2318 specs = [b'.']
2313
2319
2314 files, dirs = set(), set()
2320 files, dirs = set(), set()
2315 for spec in specs:
2321 for spec in specs:
2316 f, d = complete(spec, acceptable or b'nmar')
2322 f, d = complete(spec, acceptable or b'nmar')
2317 files.update(f)
2323 files.update(f)
2318 dirs.update(d)
2324 dirs.update(d)
2319 files.update(dirs)
2325 files.update(dirs)
2320 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2326 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2321 ui.write(b'\n')
2327 ui.write(b'\n')
2322
2328
2323
2329
2324 @command(
2330 @command(
2325 b'debugpathcopies',
2331 b'debugpathcopies',
2326 cmdutil.walkopts,
2332 cmdutil.walkopts,
2327 b'hg debugpathcopies REV1 REV2 [FILE]',
2333 b'hg debugpathcopies REV1 REV2 [FILE]',
2328 inferrepo=True,
2334 inferrepo=True,
2329 )
2335 )
2330 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2336 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2331 """show copies between two revisions"""
2337 """show copies between two revisions"""
2332 ctx1 = scmutil.revsingle(repo, rev1)
2338 ctx1 = scmutil.revsingle(repo, rev1)
2333 ctx2 = scmutil.revsingle(repo, rev2)
2339 ctx2 = scmutil.revsingle(repo, rev2)
2334 m = scmutil.match(ctx1, pats, opts)
2340 m = scmutil.match(ctx1, pats, opts)
2335 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2341 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2336 ui.write(b'%s -> %s\n' % (src, dst))
2342 ui.write(b'%s -> %s\n' % (src, dst))
2337
2343
2338
2344
2339 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2345 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2340 def debugpeer(ui, path):
2346 def debugpeer(ui, path):
2341 """establish a connection to a peer repository"""
2347 """establish a connection to a peer repository"""
2342 # Always enable peer request logging. Requires --debug to display
2348 # Always enable peer request logging. Requires --debug to display
2343 # though.
2349 # though.
2344 overrides = {
2350 overrides = {
2345 (b'devel', b'debug.peer-request'): True,
2351 (b'devel', b'debug.peer-request'): True,
2346 }
2352 }
2347
2353
2348 with ui.configoverride(overrides):
2354 with ui.configoverride(overrides):
2349 peer = hg.peer(ui, {}, path)
2355 peer = hg.peer(ui, {}, path)
2350
2356
2351 local = peer.local() is not None
2357 local = peer.local() is not None
2352 canpush = peer.canpush()
2358 canpush = peer.canpush()
2353
2359
2354 ui.write(_(b'url: %s\n') % peer.url())
2360 ui.write(_(b'url: %s\n') % peer.url())
2355 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2361 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2356 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2362 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2357
2363
2358
2364
2359 @command(
2365 @command(
2360 b'debugpickmergetool',
2366 b'debugpickmergetool',
2361 [
2367 [
2362 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2368 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2363 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2369 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2364 ]
2370 ]
2365 + cmdutil.walkopts
2371 + cmdutil.walkopts
2366 + cmdutil.mergetoolopts,
2372 + cmdutil.mergetoolopts,
2367 _(b'[PATTERN]...'),
2373 _(b'[PATTERN]...'),
2368 inferrepo=True,
2374 inferrepo=True,
2369 )
2375 )
2370 def debugpickmergetool(ui, repo, *pats, **opts):
2376 def debugpickmergetool(ui, repo, *pats, **opts):
2371 """examine which merge tool is chosen for specified file
2377 """examine which merge tool is chosen for specified file
2372
2378
2373 As described in :hg:`help merge-tools`, Mercurial examines
2379 As described in :hg:`help merge-tools`, Mercurial examines
2374 configurations below in this order to decide which merge tool is
2380 configurations below in this order to decide which merge tool is
2375 chosen for specified file.
2381 chosen for specified file.
2376
2382
2377 1. ``--tool`` option
2383 1. ``--tool`` option
2378 2. ``HGMERGE`` environment variable
2384 2. ``HGMERGE`` environment variable
2379 3. configurations in ``merge-patterns`` section
2385 3. configurations in ``merge-patterns`` section
2380 4. configuration of ``ui.merge``
2386 4. configuration of ``ui.merge``
2381 5. configurations in ``merge-tools`` section
2387 5. configurations in ``merge-tools`` section
2382 6. ``hgmerge`` tool (for historical reasons only)
2388 6. ``hgmerge`` tool (for historical reasons only)
2383 7. default tool for fallback (``:merge`` or ``:prompt``)
2389 7. default tool for fallback (``:merge`` or ``:prompt``)
2384
2390
2385 This command writes out the examination result in the style below::
2391 This command writes out the examination result in the style below::
2386
2392
2387 FILE = MERGETOOL
2393 FILE = MERGETOOL
2388
2394
2389 By default, all files known in the first parent context of the
2395 By default, all files known in the first parent context of the
2390 working directory are examined. Use file patterns and/or -I/-X
2396 working directory are examined. Use file patterns and/or -I/-X
2391 options to limit target files. -r/--rev is also useful to examine
2397 options to limit target files. -r/--rev is also useful to examine
2392 files in another context without actually updating to it.
2398 files in another context without actually updating to it.
2393
2399
2394 With --debug, this command shows warning messages while matching
2400 With --debug, this command shows warning messages while matching
2395 against ``merge-patterns`` and so on. It is recommended to
2401 against ``merge-patterns`` and so on. It is recommended to
2396 use this option with explicit file patterns and/or -I/-X options,
2402 use this option with explicit file patterns and/or -I/-X options,
2397 because this option increases the amount of output per file according
2403 because this option increases the amount of output per file according
2398 to configurations in hgrc.
2404 to configurations in hgrc.
2399
2405
2400 With -v/--verbose, this command shows configurations below at
2406 With -v/--verbose, this command shows configurations below at
2401 first (only if specified).
2407 first (only if specified).
2402
2408
2403 - ``--tool`` option
2409 - ``--tool`` option
2404 - ``HGMERGE`` environment variable
2410 - ``HGMERGE`` environment variable
2405 - configuration of ``ui.merge``
2411 - configuration of ``ui.merge``
2406
2412
2407 If a merge tool is chosen before matching against
2413 If a merge tool is chosen before matching against
2408 ``merge-patterns``, this command can't show any helpful
2414 ``merge-patterns``, this command can't show any helpful
2409 information, even with --debug. In such cases, the information above is
2415 information, even with --debug. In such cases, the information above is
2410 useful for understanding why a merge tool was chosen.
2416 useful for understanding why a merge tool was chosen.
2411 """
2417 """
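    # Hedged usage sketch (file names, tag, and tool names are hypothetical);
    # output follows the "FILE = MERGETOOL" style documented above:
    #
    #   $ hg debugpickmergetool --tool kdiff3
    #   foo.c = kdiff3
    #   $ hg debugpickmergetool -r 1.0 'glob:*.txt'
    #   README.txt = :merge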
2412 opts = pycompat.byteskwargs(opts)
2418 opts = pycompat.byteskwargs(opts)
2413 overrides = {}
2419 overrides = {}
2414 if opts[b'tool']:
2420 if opts[b'tool']:
2415 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2421 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2416 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2422 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2417
2423
2418 with ui.configoverride(overrides, b'debugmergepatterns'):
2424 with ui.configoverride(overrides, b'debugmergepatterns'):
2419 hgmerge = encoding.environ.get(b"HGMERGE")
2425 hgmerge = encoding.environ.get(b"HGMERGE")
2420 if hgmerge is not None:
2426 if hgmerge is not None:
2421 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2427 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2422 uimerge = ui.config(b"ui", b"merge")
2428 uimerge = ui.config(b"ui", b"merge")
2423 if uimerge:
2429 if uimerge:
2424 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2430 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2425
2431
2426 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2432 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2427 m = scmutil.match(ctx, pats, opts)
2433 m = scmutil.match(ctx, pats, opts)
2428 changedelete = opts[b'changedelete']
2434 changedelete = opts[b'changedelete']
2429 for path in ctx.walk(m):
2435 for path in ctx.walk(m):
2430 fctx = ctx[path]
2436 fctx = ctx[path]
2431 try:
2437 try:
2432 if not ui.debugflag:
2438 if not ui.debugflag:
2433 ui.pushbuffer(error=True)
2439 ui.pushbuffer(error=True)
2434 tool, toolpath = filemerge._picktool(
2440 tool, toolpath = filemerge._picktool(
2435 repo,
2441 repo,
2436 ui,
2442 ui,
2437 path,
2443 path,
2438 fctx.isbinary(),
2444 fctx.isbinary(),
2439 b'l' in fctx.flags(),
2445 b'l' in fctx.flags(),
2440 changedelete,
2446 changedelete,
2441 )
2447 )
2442 finally:
2448 finally:
2443 if not ui.debugflag:
2449 if not ui.debugflag:
2444 ui.popbuffer()
2450 ui.popbuffer()
2445 ui.write(b'%s = %s\n' % (path, tool))
2451 ui.write(b'%s = %s\n' % (path, tool))
2446
2452
2447
2453
2448 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2454 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2449 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2455 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2450 '''access the pushkey key/value protocol
2456 '''access the pushkey key/value protocol
2451
2457
2452 With two args, list the keys in the given namespace.
2458 With two args, list the keys in the given namespace.
2453
2459
2454 With five args, set a key to new if it is currently set to old.
2460 With five args, set a key to new if it is currently set to old.
2455 Reports success or failure.
2461 Reports success or failure.
2456 '''
2462 '''
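    # Hedged example (repository path and bookmark name are hypothetical);
    # 'namespaces' and 'bookmarks' are common pushkey namespaces:
    #
    #   $ hg debugpushkey /path/to/repo namespaces
    #   $ hg debugpushkey /path/to/repo bookmarks mybook OLDNODE NEWNODE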
2457
2463
2458 target = hg.peer(ui, {}, repopath)
2464 target = hg.peer(ui, {}, repopath)
2459 if keyinfo:
2465 if keyinfo:
2460 key, old, new = keyinfo
2466 key, old, new = keyinfo
2461 with target.commandexecutor() as e:
2467 with target.commandexecutor() as e:
2462 r = e.callcommand(
2468 r = e.callcommand(
2463 b'pushkey',
2469 b'pushkey',
2464 {
2470 {
2465 b'namespace': namespace,
2471 b'namespace': namespace,
2466 b'key': key,
2472 b'key': key,
2467 b'old': old,
2473 b'old': old,
2468 b'new': new,
2474 b'new': new,
2469 },
2475 },
2470 ).result()
2476 ).result()
2471
2477
2472 ui.status(pycompat.bytestr(r) + b'\n')
2478 ui.status(pycompat.bytestr(r) + b'\n')
2473 return not r
2479 return not r
2474 else:
2480 else:
2475 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2481 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2476 ui.write(
2482 ui.write(
2477 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2483 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2478 )
2484 )
2479
2485
2480
2486
2481 @command(b'debugpvec', [], _(b'A B'))
2487 @command(b'debugpvec', [], _(b'A B'))
2482 def debugpvec(ui, repo, a, b=None):
2488 def debugpvec(ui, repo, a, b=None):
2483 ca = scmutil.revsingle(repo, a)
2489 ca = scmutil.revsingle(repo, a)
2484 cb = scmutil.revsingle(repo, b)
2490 cb = scmutil.revsingle(repo, b)
2485 pa = pvec.ctxpvec(ca)
2491 pa = pvec.ctxpvec(ca)
2486 pb = pvec.ctxpvec(cb)
2492 pb = pvec.ctxpvec(cb)
2487 if pa == pb:
2493 if pa == pb:
2488 rel = b"="
2494 rel = b"="
2489 elif pa > pb:
2495 elif pa > pb:
2490 rel = b">"
2496 rel = b">"
2491 elif pa < pb:
2497 elif pa < pb:
2492 rel = b"<"
2498 rel = b"<"
2493 elif pa | pb:
2499 elif pa | pb:
2494 rel = b"|"
2500 rel = b"|"
2495 ui.write(_(b"a: %s\n") % pa)
2501 ui.write(_(b"a: %s\n") % pa)
2496 ui.write(_(b"b: %s\n") % pb)
2502 ui.write(_(b"b: %s\n") % pb)
2497 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2503 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2498 ui.write(
2504 ui.write(
2499 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2505 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2500 % (
2506 % (
2501 abs(pa._depth - pb._depth),
2507 abs(pa._depth - pb._depth),
2502 pvec._hamming(pa._vec, pb._vec),
2508 pvec._hamming(pa._vec, pb._vec),
2503 pa.distance(pb),
2509 pa.distance(pb),
2504 rel,
2510 rel,
2505 )
2511 )
2506 )
2512 )
2507
2513
2508
2514
2509 @command(
2515 @command(
2510 b'debugrebuilddirstate|debugrebuildstate',
2516 b'debugrebuilddirstate|debugrebuildstate',
2511 [
2517 [
2512 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2518 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2513 (
2519 (
2514 b'',
2520 b'',
2515 b'minimal',
2521 b'minimal',
2516 None,
2522 None,
2517 _(
2523 _(
2518 b'only rebuild files that are inconsistent with '
2524 b'only rebuild files that are inconsistent with '
2519 b'the working copy parent'
2525 b'the working copy parent'
2520 ),
2526 ),
2521 ),
2527 ),
2522 ],
2528 ],
2523 _(b'[-r REV]'),
2529 _(b'[-r REV]'),
2524 )
2530 )
2525 def debugrebuilddirstate(ui, repo, rev, **opts):
2531 def debugrebuilddirstate(ui, repo, rev, **opts):
2526 """rebuild the dirstate as it would look for the given revision
2532 """rebuild the dirstate as it would look for the given revision
2527
2533
2528 If no revision is specified the first current parent will be used.
2534 If no revision is specified the first current parent will be used.
2529
2535
2530 The dirstate will be set to the files of the given revision.
2536 The dirstate will be set to the files of the given revision.
2531 The actual working directory content or existing dirstate
2537 The actual working directory content or existing dirstate
2532 information such as adds or removes is not considered.
2538 information such as adds or removes is not considered.
2533
2539
2534 ``minimal`` will only rebuild the dirstate status for files that claim to be
2540 ``minimal`` will only rebuild the dirstate status for files that claim to be
2535 tracked but are not in the parent manifest, or that exist in the parent
2541 tracked but are not in the parent manifest, or that exist in the parent
2536 manifest but are not in the dirstate. It will not change adds, removes, or
2542 manifest but are not in the dirstate. It will not change adds, removes, or
2537 modified files that are in the working copy parent.
2543 modified files that are in the working copy parent.
2538
2544
2539 One use of this command is to make the next :hg:`status` invocation
2545 One use of this command is to make the next :hg:`status` invocation
2540 check the actual file content.
2546 check the actual file content.
2541 """
2547 """
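    # Hedged usage sketch (the revision spec is only an example):
    #
    #   $ hg debugrebuilddirstate -r '.^'      # rebuild from the parent's parent
    #   $ hg debugrebuilddirstate --minimal    # only fix entries inconsistent with p1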
2542 ctx = scmutil.revsingle(repo, rev)
2548 ctx = scmutil.revsingle(repo, rev)
2543 with repo.wlock():
2549 with repo.wlock():
2544 dirstate = repo.dirstate
2550 dirstate = repo.dirstate
2545 changedfiles = None
2551 changedfiles = None
2546 # See command doc for what minimal does.
2552 # See command doc for what minimal does.
2547 if opts.get('minimal'):
2553 if opts.get('minimal'):
2548 manifestfiles = set(ctx.manifest().keys())
2554 manifestfiles = set(ctx.manifest().keys())
2549 dirstatefiles = set(dirstate)
2555 dirstatefiles = set(dirstate)
2550 manifestonly = manifestfiles - dirstatefiles
2556 manifestonly = manifestfiles - dirstatefiles
2551 dsonly = dirstatefiles - manifestfiles
2557 dsonly = dirstatefiles - manifestfiles
2552 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2558 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2553 changedfiles = manifestonly | dsnotadded
2559 changedfiles = manifestonly | dsnotadded
2554
2560
2555 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2561 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2556
2562
2557
2563
2558 @command(b'debugrebuildfncache', [], b'')
2564 @command(b'debugrebuildfncache', [], b'')
2559 def debugrebuildfncache(ui, repo):
2565 def debugrebuildfncache(ui, repo):
2560 """rebuild the fncache file"""
2566 """rebuild the fncache file"""
2561 repair.rebuildfncache(ui, repo)
2567 repair.rebuildfncache(ui, repo)
2562
2568
2563
2569
2564 @command(
2570 @command(
2565 b'debugrename',
2571 b'debugrename',
2566 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2572 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2567 _(b'[-r REV] [FILE]...'),
2573 _(b'[-r REV] [FILE]...'),
2568 )
2574 )
2569 def debugrename(ui, repo, *pats, **opts):
2575 def debugrename(ui, repo, *pats, **opts):
2570 """dump rename information"""
2576 """dump rename information"""
2571
2577
2572 opts = pycompat.byteskwargs(opts)
2578 opts = pycompat.byteskwargs(opts)
2573 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2579 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2574 m = scmutil.match(ctx, pats, opts)
2580 m = scmutil.match(ctx, pats, opts)
2575 for abs in ctx.walk(m):
2581 for abs in ctx.walk(m):
2576 fctx = ctx[abs]
2582 fctx = ctx[abs]
2577 o = fctx.filelog().renamed(fctx.filenode())
2583 o = fctx.filelog().renamed(fctx.filenode())
2578 rel = repo.pathto(abs)
2584 rel = repo.pathto(abs)
2579 if o:
2585 if o:
2580 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2586 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2581 else:
2587 else:
2582 ui.write(_(b"%s not renamed\n") % rel)
2588 ui.write(_(b"%s not renamed\n") % rel)
2583
2589
2584
2590
2585 @command(
2591 @command(
2586 b'debugrevlog',
2592 b'debugrevlog',
2587 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2593 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2588 _(b'-c|-m|FILE'),
2594 _(b'-c|-m|FILE'),
2589 optionalrepo=True,
2595 optionalrepo=True,
2590 )
2596 )
2591 def debugrevlog(ui, repo, file_=None, **opts):
2597 def debugrevlog(ui, repo, file_=None, **opts):
2592 """show data and statistics about a revlog"""
2598 """show data and statistics about a revlog"""
2593 opts = pycompat.byteskwargs(opts)
2599 opts = pycompat.byteskwargs(opts)
2594 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2600 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2595
2601
2596 if opts.get(b"dump"):
2602 if opts.get(b"dump"):
2597 numrevs = len(r)
2603 numrevs = len(r)
2598 ui.write(
2604 ui.write(
2599 (
2605 (
2600 b"# rev p1rev p2rev start end deltastart base p1 p2"
2606 b"# rev p1rev p2rev start end deltastart base p1 p2"
2601 b" rawsize totalsize compression heads chainlen\n"
2607 b" rawsize totalsize compression heads chainlen\n"
2602 )
2608 )
2603 )
2609 )
2604 ts = 0
2610 ts = 0
2605 heads = set()
2611 heads = set()
2606
2612
2607 for rev in pycompat.xrange(numrevs):
2613 for rev in pycompat.xrange(numrevs):
2608 dbase = r.deltaparent(rev)
2614 dbase = r.deltaparent(rev)
2609 if dbase == -1:
2615 if dbase == -1:
2610 dbase = rev
2616 dbase = rev
2611 cbase = r.chainbase(rev)
2617 cbase = r.chainbase(rev)
2612 clen = r.chainlen(rev)
2618 clen = r.chainlen(rev)
2613 p1, p2 = r.parentrevs(rev)
2619 p1, p2 = r.parentrevs(rev)
2614 rs = r.rawsize(rev)
2620 rs = r.rawsize(rev)
2615 ts = ts + rs
2621 ts = ts + rs
2616 heads -= set(r.parentrevs(rev))
2622 heads -= set(r.parentrevs(rev))
2617 heads.add(rev)
2623 heads.add(rev)
2618 try:
2624 try:
2619 compression = ts / r.end(rev)
2625 compression = ts / r.end(rev)
2620 except ZeroDivisionError:
2626 except ZeroDivisionError:
2621 compression = 0
2627 compression = 0
2622 ui.write(
2628 ui.write(
2623 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2629 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2624 b"%11d %5d %8d\n"
2630 b"%11d %5d %8d\n"
2625 % (
2631 % (
2626 rev,
2632 rev,
2627 p1,
2633 p1,
2628 p2,
2634 p2,
2629 r.start(rev),
2635 r.start(rev),
2630 r.end(rev),
2636 r.end(rev),
2631 r.start(dbase),
2637 r.start(dbase),
2632 r.start(cbase),
2638 r.start(cbase),
2633 r.start(p1),
2639 r.start(p1),
2634 r.start(p2),
2640 r.start(p2),
2635 rs,
2641 rs,
2636 ts,
2642 ts,
2637 compression,
2643 compression,
2638 len(heads),
2644 len(heads),
2639 clen,
2645 clen,
2640 )
2646 )
2641 )
2647 )
2642 return 0
2648 return 0
2643
2649
2644 v = r.version
2650 v = r.version
2645 format = v & 0xFFFF
2651 format = v & 0xFFFF
2646 flags = []
2652 flags = []
2647 gdelta = False
2653 gdelta = False
2648 if v & revlog.FLAG_INLINE_DATA:
2654 if v & revlog.FLAG_INLINE_DATA:
2649 flags.append(b'inline')
2655 flags.append(b'inline')
2650 if v & revlog.FLAG_GENERALDELTA:
2656 if v & revlog.FLAG_GENERALDELTA:
2651 gdelta = True
2657 gdelta = True
2652 flags.append(b'generaldelta')
2658 flags.append(b'generaldelta')
2653 if not flags:
2659 if not flags:
2654 flags = [b'(none)']
2660 flags = [b'(none)']
2655
2661
2656 ### tracks merge vs single parent
2662 ### tracks merge vs single parent
2657 nummerges = 0
2663 nummerges = 0
2658
2664
2659 ### tracks how the "delta" is built
2665 ### tracks how the "delta" is built
2660 # nodelta
2666 # nodelta
2661 numempty = 0
2667 numempty = 0
2662 numemptytext = 0
2668 numemptytext = 0
2663 numemptydelta = 0
2669 numemptydelta = 0
2664 # full file content
2670 # full file content
2665 numfull = 0
2671 numfull = 0
2666 # intermediate snapshot against a prior snapshot
2672 # intermediate snapshot against a prior snapshot
2667 numsemi = 0
2673 numsemi = 0
2668 # snapshot count per depth
2674 # snapshot count per depth
2669 numsnapdepth = collections.defaultdict(lambda: 0)
2675 numsnapdepth = collections.defaultdict(lambda: 0)
2670 # delta against previous revision
2676 # delta against previous revision
2671 numprev = 0
2677 numprev = 0
2672 # delta against first or second parent (not prev)
2678 # delta against first or second parent (not prev)
2673 nump1 = 0
2679 nump1 = 0
2674 nump2 = 0
2680 nump2 = 0
2675 # delta against neither prev nor parents
2681 # delta against neither prev nor parents
2676 numother = 0
2682 numother = 0
2677 # delta against prev that are also first or second parent
2683 # delta against prev that are also first or second parent
2678 # (details of `numprev`)
2684 # (details of `numprev`)
2679 nump1prev = 0
2685 nump1prev = 0
2680 nump2prev = 0
2686 nump2prev = 0
2681
2687
2682 # data about delta chain of each revs
2688 # data about delta chain of each revs
2683 chainlengths = []
2689 chainlengths = []
2684 chainbases = []
2690 chainbases = []
2685 chainspans = []
2691 chainspans = []
2686
2692
2687 # data about each revision
2693 # data about each revision
2688 datasize = [None, 0, 0]
2694 datasize = [None, 0, 0]
2689 fullsize = [None, 0, 0]
2695 fullsize = [None, 0, 0]
2690 semisize = [None, 0, 0]
2696 semisize = [None, 0, 0]
2691 # snapshot count per depth
2697 # snapshot count per depth
2692 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2698 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2693 deltasize = [None, 0, 0]
2699 deltasize = [None, 0, 0]
2694 chunktypecounts = {}
2700 chunktypecounts = {}
2695 chunktypesizes = {}
2701 chunktypesizes = {}
2696
2702
2697 def addsize(size, l):
2703 def addsize(size, l):
2698 if l[0] is None or size < l[0]:
2704 if l[0] is None or size < l[0]:
2699 l[0] = size
2705 l[0] = size
2700 if size > l[1]:
2706 if size > l[1]:
2701 l[1] = size
2707 l[1] = size
2702 l[2] += size
2708 l[2] += size
2703
2709
2704 numrevs = len(r)
2710 numrevs = len(r)
2705 for rev in pycompat.xrange(numrevs):
2711 for rev in pycompat.xrange(numrevs):
2706 p1, p2 = r.parentrevs(rev)
2712 p1, p2 = r.parentrevs(rev)
2707 delta = r.deltaparent(rev)
2713 delta = r.deltaparent(rev)
2708 if format > 0:
2714 if format > 0:
2709 addsize(r.rawsize(rev), datasize)
2715 addsize(r.rawsize(rev), datasize)
2710 if p2 != nullrev:
2716 if p2 != nullrev:
2711 nummerges += 1
2717 nummerges += 1
2712 size = r.length(rev)
2718 size = r.length(rev)
2713 if delta == nullrev:
2719 if delta == nullrev:
2714 chainlengths.append(0)
2720 chainlengths.append(0)
2715 chainbases.append(r.start(rev))
2721 chainbases.append(r.start(rev))
2716 chainspans.append(size)
2722 chainspans.append(size)
2717 if size == 0:
2723 if size == 0:
2718 numempty += 1
2724 numempty += 1
2719 numemptytext += 1
2725 numemptytext += 1
2720 else:
2726 else:
2721 numfull += 1
2727 numfull += 1
2722 numsnapdepth[0] += 1
2728 numsnapdepth[0] += 1
2723 addsize(size, fullsize)
2729 addsize(size, fullsize)
2724 addsize(size, snapsizedepth[0])
2730 addsize(size, snapsizedepth[0])
2725 else:
2731 else:
2726 chainlengths.append(chainlengths[delta] + 1)
2732 chainlengths.append(chainlengths[delta] + 1)
2727 baseaddr = chainbases[delta]
2733 baseaddr = chainbases[delta]
2728 revaddr = r.start(rev)
2734 revaddr = r.start(rev)
2729 chainbases.append(baseaddr)
2735 chainbases.append(baseaddr)
2730 chainspans.append((revaddr - baseaddr) + size)
2736 chainspans.append((revaddr - baseaddr) + size)
2731 if size == 0:
2737 if size == 0:
2732 numempty += 1
2738 numempty += 1
2733 numemptydelta += 1
2739 numemptydelta += 1
2734 elif r.issnapshot(rev):
2740 elif r.issnapshot(rev):
2735 addsize(size, semisize)
2741 addsize(size, semisize)
2736 numsemi += 1
2742 numsemi += 1
2737 depth = r.snapshotdepth(rev)
2743 depth = r.snapshotdepth(rev)
2738 numsnapdepth[depth] += 1
2744 numsnapdepth[depth] += 1
2739 addsize(size, snapsizedepth[depth])
2745 addsize(size, snapsizedepth[depth])
2740 else:
2746 else:
2741 addsize(size, deltasize)
2747 addsize(size, deltasize)
2742 if delta == rev - 1:
2748 if delta == rev - 1:
2743 numprev += 1
2749 numprev += 1
2744 if delta == p1:
2750 if delta == p1:
2745 nump1prev += 1
2751 nump1prev += 1
2746 elif delta == p2:
2752 elif delta == p2:
2747 nump2prev += 1
2753 nump2prev += 1
2748 elif delta == p1:
2754 elif delta == p1:
2749 nump1 += 1
2755 nump1 += 1
2750 elif delta == p2:
2756 elif delta == p2:
2751 nump2 += 1
2757 nump2 += 1
2752 elif delta != nullrev:
2758 elif delta != nullrev:
2753 numother += 1
2759 numother += 1
2754
2760
2755 # Obtain data on the raw chunks in the revlog.
2761 # Obtain data on the raw chunks in the revlog.
2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2762 if util.safehasattr(r, b'_getsegmentforrevs'):
2757 segment = r._getsegmentforrevs(rev, rev)[1]
2763 segment = r._getsegmentforrevs(rev, rev)[1]
2758 else:
2764 else:
2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2765 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2760 if segment:
2766 if segment:
2761 chunktype = bytes(segment[0:1])
2767 chunktype = bytes(segment[0:1])
2762 else:
2768 else:
2763 chunktype = b'empty'
2769 chunktype = b'empty'
2764
2770
2765 if chunktype not in chunktypecounts:
2771 if chunktype not in chunktypecounts:
2766 chunktypecounts[chunktype] = 0
2772 chunktypecounts[chunktype] = 0
2767 chunktypesizes[chunktype] = 0
2773 chunktypesizes[chunktype] = 0
2768
2774
2769 chunktypecounts[chunktype] += 1
2775 chunktypecounts[chunktype] += 1
2770 chunktypesizes[chunktype] += size
2776 chunktypesizes[chunktype] += size
2771
2777
2772 # Adjust size min value for empty cases
2778 # Adjust size min value for empty cases
2773 for size in (datasize, fullsize, semisize, deltasize):
2779 for size in (datasize, fullsize, semisize, deltasize):
2774 if size[0] is None:
2780 if size[0] is None:
2775 size[0] = 0
2781 size[0] = 0
2776
2782
2777 numdeltas = numrevs - numfull - numempty - numsemi
2783 numdeltas = numrevs - numfull - numempty - numsemi
2778 numoprev = numprev - nump1prev - nump2prev
2784 numoprev = numprev - nump1prev - nump2prev
2779 totalrawsize = datasize[2]
2785 totalrawsize = datasize[2]
2780 datasize[2] /= numrevs
2786 datasize[2] /= numrevs
2781 fulltotal = fullsize[2]
2787 fulltotal = fullsize[2]
2782 if numfull == 0:
2788 if numfull == 0:
2783 fullsize[2] = 0
2789 fullsize[2] = 0
2784 else:
2790 else:
2785 fullsize[2] /= numfull
2791 fullsize[2] /= numfull
2786 semitotal = semisize[2]
2792 semitotal = semisize[2]
2787 snaptotal = {}
2793 snaptotal = {}
2788 if numsemi > 0:
2794 if numsemi > 0:
2789 semisize[2] /= numsemi
2795 semisize[2] /= numsemi
2790 for depth in snapsizedepth:
2796 for depth in snapsizedepth:
2791 snaptotal[depth] = snapsizedepth[depth][2]
2797 snaptotal[depth] = snapsizedepth[depth][2]
2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2798 snapsizedepth[depth][2] /= numsnapdepth[depth]
2793
2799
2794 deltatotal = deltasize[2]
2800 deltatotal = deltasize[2]
2795 if numdeltas > 0:
2801 if numdeltas > 0:
2796 deltasize[2] /= numdeltas
2802 deltasize[2] /= numdeltas
2797 totalsize = fulltotal + semitotal + deltatotal
2803 totalsize = fulltotal + semitotal + deltatotal
2798 avgchainlen = sum(chainlengths) / numrevs
2804 avgchainlen = sum(chainlengths) / numrevs
2799 maxchainlen = max(chainlengths)
2805 maxchainlen = max(chainlengths)
2800 maxchainspan = max(chainspans)
2806 maxchainspan = max(chainspans)
2801 compratio = 1
2807 compratio = 1
2802 if totalsize:
2808 if totalsize:
2803 compratio = totalrawsize / totalsize
2809 compratio = totalrawsize / totalsize
2804
2810
2805 basedfmtstr = b'%%%dd\n'
2811 basedfmtstr = b'%%%dd\n'
2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2812 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2807
2813
2808 def dfmtstr(max):
2814 def dfmtstr(max):
2809 return basedfmtstr % len(str(max))
2815 return basedfmtstr % len(str(max))
2810
2816
2811 def pcfmtstr(max, padding=0):
2817 def pcfmtstr(max, padding=0):
2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2818 return basepcfmtstr % (len(str(max)), b' ' * padding)
2813
2819
2814 def pcfmt(value, total):
2820 def pcfmt(value, total):
2815 if total:
2821 if total:
2816 return (value, 100 * float(value) / total)
2822 return (value, 100 * float(value) / total)
2817 else:
2823 else:
2818 return value, 100.0
2824 return value, 100.0
2819
2825
2820 ui.writenoi18n(b'format : %d\n' % format)
2826 ui.writenoi18n(b'format : %d\n' % format)
2821 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2827 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2822
2828
2823 ui.write(b'\n')
2829 ui.write(b'\n')
2824 fmt = pcfmtstr(totalsize)
2830 fmt = pcfmtstr(totalsize)
2825 fmt2 = dfmtstr(totalsize)
2831 fmt2 = dfmtstr(totalsize)
2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2832 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2827 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2833 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2828 ui.writenoi18n(
2834 ui.writenoi18n(
2829 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2835 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2830 )
2836 )
2831 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2837 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2832 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2838 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2833 ui.writenoi18n(
2839 ui.writenoi18n(
2834 b' text : '
2840 b' text : '
2835 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2841 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2836 )
2842 )
2837 ui.writenoi18n(
2843 ui.writenoi18n(
2838 b' delta : '
2844 b' delta : '
2839 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2845 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2840 )
2846 )
2841 ui.writenoi18n(
2847 ui.writenoi18n(
2842 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2848 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2843 )
2849 )
2844 for depth in sorted(numsnapdepth):
2850 for depth in sorted(numsnapdepth):
2845 ui.write(
2851 ui.write(
2846 (b' lvl-%-3d : ' % depth)
2852 (b' lvl-%-3d : ' % depth)
2847 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2853 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2848 )
2854 )
2849 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2855 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2850 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2856 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2851 ui.writenoi18n(
2857 ui.writenoi18n(
2852 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2858 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2853 )
2859 )
2854 for depth in sorted(numsnapdepth):
2860 for depth in sorted(numsnapdepth):
2855 ui.write(
2861 ui.write(
2856 (b' lvl-%-3d : ' % depth)
2862 (b' lvl-%-3d : ' % depth)
2857 + fmt % pcfmt(snaptotal[depth], totalsize)
2863 + fmt % pcfmt(snaptotal[depth], totalsize)
2858 )
2864 )
2859 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2865 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2860
2866
2861 def fmtchunktype(chunktype):
2867 def fmtchunktype(chunktype):
2862 if chunktype == b'empty':
2868 if chunktype == b'empty':
2863 return b' %s : ' % chunktype
2869 return b' %s : ' % chunktype
2864 elif chunktype in pycompat.bytestr(string.ascii_letters):
2870 elif chunktype in pycompat.bytestr(string.ascii_letters):
2865 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2871 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2866 else:
2872 else:
2867 return b' 0x%s : ' % hex(chunktype)
2873 return b' 0x%s : ' % hex(chunktype)
2868
2874
2869 ui.write(b'\n')
2875 ui.write(b'\n')
2870 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2876 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2871 for chunktype in sorted(chunktypecounts):
2877 for chunktype in sorted(chunktypecounts):
2872 ui.write(fmtchunktype(chunktype))
2878 ui.write(fmtchunktype(chunktype))
2873 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2879 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2874 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2880 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2875 for chunktype in sorted(chunktypecounts):
2881 for chunktype in sorted(chunktypecounts):
2876 ui.write(fmtchunktype(chunktype))
2882 ui.write(fmtchunktype(chunktype))
2877 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2883 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2878
2884
2879 ui.write(b'\n')
2885 ui.write(b'\n')
2880 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2886 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2881 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2887 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2882 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2888 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2883 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2889 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2884 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2890 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2885
2891
2886 if format > 0:
2892 if format > 0:
2887 ui.write(b'\n')
2893 ui.write(b'\n')
2888 ui.writenoi18n(
2894 ui.writenoi18n(
2889 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2895 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2890 % tuple(datasize)
2896 % tuple(datasize)
2891 )
2897 )
2892 ui.writenoi18n(
2898 ui.writenoi18n(
2893 b'full revision size (min/max/avg) : %d / %d / %d\n'
2899 b'full revision size (min/max/avg) : %d / %d / %d\n'
2894 % tuple(fullsize)
2900 % tuple(fullsize)
2895 )
2901 )
2896 ui.writenoi18n(
2902 ui.writenoi18n(
2897 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2903 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2898 % tuple(semisize)
2904 % tuple(semisize)
2899 )
2905 )
2900 for depth in sorted(snapsizedepth):
2906 for depth in sorted(snapsizedepth):
2901 if depth == 0:
2907 if depth == 0:
2902 continue
2908 continue
2903 ui.writenoi18n(
2909 ui.writenoi18n(
2904 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2910 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2905 % ((depth,) + tuple(snapsizedepth[depth]))
2911 % ((depth,) + tuple(snapsizedepth[depth]))
2906 )
2912 )
2907 ui.writenoi18n(
2913 ui.writenoi18n(
2908 b'delta size (min/max/avg) : %d / %d / %d\n'
2914 b'delta size (min/max/avg) : %d / %d / %d\n'
2909 % tuple(deltasize)
2915 % tuple(deltasize)
2910 )
2916 )
2911
2917
2912 if numdeltas > 0:
2918 if numdeltas > 0:
2913 ui.write(b'\n')
2919 ui.write(b'\n')
2914 fmt = pcfmtstr(numdeltas)
2920 fmt = pcfmtstr(numdeltas)
2915 fmt2 = pcfmtstr(numdeltas, 4)
2921 fmt2 = pcfmtstr(numdeltas, 4)
2916 ui.writenoi18n(
2922 ui.writenoi18n(
2917 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2923 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2918 )
2924 )
2919 if numprev > 0:
2925 if numprev > 0:
2920 ui.writenoi18n(
2926 ui.writenoi18n(
2921 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2927 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2922 )
2928 )
2923 ui.writenoi18n(
2929 ui.writenoi18n(
2924 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2930 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2925 )
2931 )
2926 ui.writenoi18n(
2932 ui.writenoi18n(
2927 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2933 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2928 )
2934 )
2929 if gdelta:
2935 if gdelta:
2930 ui.writenoi18n(
2936 ui.writenoi18n(
2931 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2937 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2932 )
2938 )
2933 ui.writenoi18n(
2939 ui.writenoi18n(
2934 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2940 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2935 )
2941 )
2936 ui.writenoi18n(
2942 ui.writenoi18n(
2937 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2943 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2938 )
2944 )
2939
2945
2940
2946
2941 @command(
2947 @command(
2942 b'debugrevlogindex',
2948 b'debugrevlogindex',
2943 cmdutil.debugrevlogopts
2949 cmdutil.debugrevlogopts
2944 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2950 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2945 _(b'[-f FORMAT] -c|-m|FILE'),
2951 _(b'[-f FORMAT] -c|-m|FILE'),
2946 optionalrepo=True,
2952 optionalrepo=True,
2947 )
2953 )
2948 def debugrevlogindex(ui, repo, file_=None, **opts):
2954 def debugrevlogindex(ui, repo, file_=None, **opts):
2949 """dump the contents of a revlog index"""
2955 """dump the contents of a revlog index"""
2950 opts = pycompat.byteskwargs(opts)
2956 opts = pycompat.byteskwargs(opts)
2951 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2957 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2952 format = opts.get(b'format', 0)
2958 format = opts.get(b'format', 0)
2953 if format not in (0, 1):
2959 if format not in (0, 1):
2954 raise error.Abort(_(b"unknown format %d") % format)
2960 raise error.Abort(_(b"unknown format %d") % format)
2955
2961
2956 if ui.debugflag:
2962 if ui.debugflag:
2957 shortfn = hex
2963 shortfn = hex
2958 else:
2964 else:
2959 shortfn = short
2965 shortfn = short
2960
2966
2961 # There might not be anything in r, so have a sane default
2967 # There might not be anything in r, so have a sane default
2962 idlen = 12
2968 idlen = 12
2963 for i in r:
2969 for i in r:
2964 idlen = len(shortfn(r.node(i)))
2970 idlen = len(shortfn(r.node(i)))
2965 break
2971 break
2966
2972
2967 if format == 0:
2973 if format == 0:
2968 if ui.verbose:
2974 if ui.verbose:
2969 ui.writenoi18n(
2975 ui.writenoi18n(
2970 b" rev offset length linkrev %s %s p2\n"
2976 b" rev offset length linkrev %s %s p2\n"
2971 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2977 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2972 )
2978 )
2973 else:
2979 else:
2974 ui.writenoi18n(
2980 ui.writenoi18n(
2975 b" rev linkrev %s %s p2\n"
2981 b" rev linkrev %s %s p2\n"
2976 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2982 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2977 )
2983 )
2978 elif format == 1:
2984 elif format == 1:
2979 if ui.verbose:
2985 if ui.verbose:
2980 ui.writenoi18n(
2986 ui.writenoi18n(
2981 (
2987 (
2982 b" rev flag offset length size link p1"
2988 b" rev flag offset length size link p1"
2983 b" p2 %s\n"
2989 b" p2 %s\n"
2984 )
2990 )
2985 % b"nodeid".rjust(idlen)
2991 % b"nodeid".rjust(idlen)
2986 )
2992 )
2987 else:
2993 else:
2988 ui.writenoi18n(
2994 ui.writenoi18n(
2989 b" rev flag size link p1 p2 %s\n"
2995 b" rev flag size link p1 p2 %s\n"
2990 % b"nodeid".rjust(idlen)
2996 % b"nodeid".rjust(idlen)
2991 )
2997 )
2992
2998
2993 for i in r:
2999 for i in r:
2994 node = r.node(i)
3000 node = r.node(i)
2995 if format == 0:
3001 if format == 0:
2996 try:
3002 try:
2997 pp = r.parents(node)
3003 pp = r.parents(node)
2998 except Exception:
3004 except Exception:
2999 pp = [nullid, nullid]
3005 pp = [nullid, nullid]
3000 if ui.verbose:
3006 if ui.verbose:
3001 ui.write(
3007 ui.write(
3002 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3008 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3003 % (
3009 % (
3004 i,
3010 i,
3005 r.start(i),
3011 r.start(i),
3006 r.length(i),
3012 r.length(i),
3007 r.linkrev(i),
3013 r.linkrev(i),
3008 shortfn(node),
3014 shortfn(node),
3009 shortfn(pp[0]),
3015 shortfn(pp[0]),
3010 shortfn(pp[1]),
3016 shortfn(pp[1]),
3011 )
3017 )
3012 )
3018 )
3013 else:
3019 else:
3014 ui.write(
3020 ui.write(
3015 b"% 6d % 7d %s %s %s\n"
3021 b"% 6d % 7d %s %s %s\n"
3016 % (
3022 % (
3017 i,
3023 i,
3018 r.linkrev(i),
3024 r.linkrev(i),
3019 shortfn(node),
3025 shortfn(node),
3020 shortfn(pp[0]),
3026 shortfn(pp[0]),
3021 shortfn(pp[1]),
3027 shortfn(pp[1]),
3022 )
3028 )
3023 )
3029 )
3024 elif format == 1:
3030 elif format == 1:
3025 pr = r.parentrevs(i)
3031 pr = r.parentrevs(i)
3026 if ui.verbose:
3032 if ui.verbose:
3027 ui.write(
3033 ui.write(
3028 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3034 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3029 % (
3035 % (
3030 i,
3036 i,
3031 r.flags(i),
3037 r.flags(i),
3032 r.start(i),
3038 r.start(i),
3033 r.length(i),
3039 r.length(i),
3034 r.rawsize(i),
3040 r.rawsize(i),
3035 r.linkrev(i),
3041 r.linkrev(i),
3036 pr[0],
3042 pr[0],
3037 pr[1],
3043 pr[1],
3038 shortfn(node),
3044 shortfn(node),
3039 )
3045 )
3040 )
3046 )
3041 else:
3047 else:
3042 ui.write(
3048 ui.write(
3043 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3049 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3044 % (
3050 % (
3045 i,
3051 i,
3046 r.flags(i),
3052 r.flags(i),
3047 r.rawsize(i),
3053 r.rawsize(i),
3048 r.linkrev(i),
3054 r.linkrev(i),
3049 pr[0],
3055 pr[0],
3050 pr[1],
3056 pr[1],
3051 shortfn(node),
3057 shortfn(node),
3052 )
3058 )
3053 )
3059 )
3054
3060
3055
3061
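# A minimal sketch (not wired into any command) of the per-revision fields that
# debugrevlogindex() above prints, assuming ``r`` is a revlog obtained the same
# way, via cmdutil.openrevlog(). The helper name is illustrative only.
def _example_index_row(r, i):
    node = r.node(i)
    return {
        b'offset': r.start(i),  # byte offset of the revision in the data file
        b'length': r.length(i),  # stored (compressed) length
        b'rawsize': r.rawsize(i),  # uncompressed size of the revision
        b'linkrev': r.linkrev(i),  # changelog revision this entry links to
        b'parents': r.parentrevs(i),  # parent revision numbers
        b'flags': r.flags(i),  # revlog flags bitfield
        b'node': hex(node),
    }

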
3056 @command(
3062 @command(
3057 b'debugrevspec',
3063 b'debugrevspec',
3058 [
3064 [
3059 (
3065 (
3060 b'',
3066 b'',
3061 b'optimize',
3067 b'optimize',
3062 None,
3068 None,
3063 _(b'print parsed tree after optimizing (DEPRECATED)'),
3069 _(b'print parsed tree after optimizing (DEPRECATED)'),
3064 ),
3070 ),
3065 (
3071 (
3066 b'',
3072 b'',
3067 b'show-revs',
3073 b'show-revs',
3068 True,
3074 True,
3069 _(b'print list of result revisions (default)'),
3075 _(b'print list of result revisions (default)'),
3070 ),
3076 ),
3071 (
3077 (
3072 b's',
3078 b's',
3073 b'show-set',
3079 b'show-set',
3074 None,
3080 None,
3075 _(b'print internal representation of result set'),
3081 _(b'print internal representation of result set'),
3076 ),
3082 ),
3077 (
3083 (
3078 b'p',
3084 b'p',
3079 b'show-stage',
3085 b'show-stage',
3080 [],
3086 [],
3081 _(b'print parsed tree at the given stage'),
3087 _(b'print parsed tree at the given stage'),
3082 _(b'NAME'),
3088 _(b'NAME'),
3083 ),
3089 ),
3084 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3090 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3085 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3091 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3086 ],
3092 ],
3087 b'REVSPEC',
3093 b'REVSPEC',
3088 )
3094 )
3089 def debugrevspec(ui, repo, expr, **opts):
3095 def debugrevspec(ui, repo, expr, **opts):
3090 """parse and apply a revision specification
3096 """parse and apply a revision specification
3091
3097
3092 Use -p/--show-stage option to print the parsed tree at the given stages.
3098 Use -p/--show-stage option to print the parsed tree at the given stages.
3093 Use -p all to print tree at every stage.
3099 Use -p all to print tree at every stage.
3094
3100
3095 Use --no-show-revs option with -s or -p to print only the set
3101 Use --no-show-revs option with -s or -p to print only the set
3096 representation or the parsed tree respectively.
3102 representation or the parsed tree respectively.
3097
3103
3098 Use --verify-optimized to compare the optimized result with the unoptimized
3104 Use --verify-optimized to compare the optimized result with the unoptimized
3099 one. Returns 1 if the optimized result differs.
3105 one. Returns 1 if the optimized result differs.
3100 """
3106 """
3101 opts = pycompat.byteskwargs(opts)
3107 opts = pycompat.byteskwargs(opts)
3102 aliases = ui.configitems(b'revsetalias')
3108 aliases = ui.configitems(b'revsetalias')
3103 stages = [
3109 stages = [
3104 (b'parsed', lambda tree: tree),
3110 (b'parsed', lambda tree: tree),
3105 (
3111 (
3106 b'expanded',
3112 b'expanded',
3107 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3113 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3108 ),
3114 ),
3109 (b'concatenated', revsetlang.foldconcat),
3115 (b'concatenated', revsetlang.foldconcat),
3110 (b'analyzed', revsetlang.analyze),
3116 (b'analyzed', revsetlang.analyze),
3111 (b'optimized', revsetlang.optimize),
3117 (b'optimized', revsetlang.optimize),
3112 ]
3118 ]
3113 if opts[b'no_optimized']:
3119 if opts[b'no_optimized']:
3114 stages = stages[:-1]
3120 stages = stages[:-1]
3115 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3121 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3116 raise error.Abort(
3122 raise error.Abort(
3117 _(b'cannot use --verify-optimized with --no-optimized')
3123 _(b'cannot use --verify-optimized with --no-optimized')
3118 )
3124 )
3119 stagenames = set(n for n, f in stages)
3125 stagenames = set(n for n, f in stages)
3120
3126
3121 showalways = set()
3127 showalways = set()
3122 showchanged = set()
3128 showchanged = set()
3123 if ui.verbose and not opts[b'show_stage']:
3129 if ui.verbose and not opts[b'show_stage']:
3124 # show parsed tree by --verbose (deprecated)
3130 # show parsed tree by --verbose (deprecated)
3125 showalways.add(b'parsed')
3131 showalways.add(b'parsed')
3126 showchanged.update([b'expanded', b'concatenated'])
3132 showchanged.update([b'expanded', b'concatenated'])
3127 if opts[b'optimize']:
3133 if opts[b'optimize']:
3128 showalways.add(b'optimized')
3134 showalways.add(b'optimized')
3129 if opts[b'show_stage'] and opts[b'optimize']:
3135 if opts[b'show_stage'] and opts[b'optimize']:
3130 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3136 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3131 if opts[b'show_stage'] == [b'all']:
3137 if opts[b'show_stage'] == [b'all']:
3132 showalways.update(stagenames)
3138 showalways.update(stagenames)
3133 else:
3139 else:
3134 for n in opts[b'show_stage']:
3140 for n in opts[b'show_stage']:
3135 if n not in stagenames:
3141 if n not in stagenames:
3136 raise error.Abort(_(b'invalid stage name: %s') % n)
3142 raise error.Abort(_(b'invalid stage name: %s') % n)
3137 showalways.update(opts[b'show_stage'])
3143 showalways.update(opts[b'show_stage'])
3138
3144
3139 treebystage = {}
3145 treebystage = {}
3140 printedtree = None
3146 printedtree = None
3141 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3147 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3142 for n, f in stages:
3148 for n, f in stages:
3143 treebystage[n] = tree = f(tree)
3149 treebystage[n] = tree = f(tree)
3144 if n in showalways or (n in showchanged and tree != printedtree):
3150 if n in showalways or (n in showchanged and tree != printedtree):
3145 if opts[b'show_stage'] or n != b'parsed':
3151 if opts[b'show_stage'] or n != b'parsed':
3146 ui.write(b"* %s:\n" % n)
3152 ui.write(b"* %s:\n" % n)
3147 ui.write(revsetlang.prettyformat(tree), b"\n")
3153 ui.write(revsetlang.prettyformat(tree), b"\n")
3148 printedtree = tree
3154 printedtree = tree
3149
3155
3150 if opts[b'verify_optimized']:
3156 if opts[b'verify_optimized']:
3151 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3157 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3152 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3158 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3153 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3159 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3154 ui.writenoi18n(
3160 ui.writenoi18n(
3155 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3161 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3156 )
3162 )
3157 ui.writenoi18n(
3163 ui.writenoi18n(
3158 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3164 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3159 )
3165 )
3160 arevs = list(arevs)
3166 arevs = list(arevs)
3161 brevs = list(brevs)
3167 brevs = list(brevs)
3162 if arevs == brevs:
3168 if arevs == brevs:
3163 return 0
3169 return 0
3164 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3170 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3165 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3171 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3166 sm = difflib.SequenceMatcher(None, arevs, brevs)
3172 sm = difflib.SequenceMatcher(None, arevs, brevs)
3167 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3173 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3168 if tag in ('delete', 'replace'):
3174 if tag in ('delete', 'replace'):
3169 for c in arevs[alo:ahi]:
3175 for c in arevs[alo:ahi]:
3170 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3176 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3171 if tag in ('insert', 'replace'):
3177 if tag in ('insert', 'replace'):
3172 for c in brevs[blo:bhi]:
3178 for c in brevs[blo:bhi]:
3173 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3179 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3174 if tag == 'equal':
3180 if tag == 'equal':
3175 for c in arevs[alo:ahi]:
3181 for c in arevs[alo:ahi]:
3176 ui.write(b' %d\n' % c)
3182 ui.write(b' %d\n' % c)
3177 return 1
3183 return 1
3178
3184
3179 func = revset.makematcher(tree)
3185 func = revset.makematcher(tree)
3180 revs = func(repo)
3186 revs = func(repo)
3181 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3187 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3182 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3188 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3183 if not opts[b'show_revs']:
3189 if not opts[b'show_revs']:
3184 return
3190 return
3185 for c in revs:
3191 for c in revs:
3186 ui.write(b"%d\n" % c)
3192 ui.write(b"%d\n" % c)
3187
3193
3188
3194
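# A minimal sketch (not used anywhere) of the stage pipeline that debugrevspec()
# above walks explicitly: parse, analyze, optimize, then build a matcher. Alias
# expansion and concatenation folding are skipped for brevity; the helper name
# and the sample expression are illustrative only.
def _example_revspec_stages(repo, expr=b'heads(all())'):
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    tree = revsetlang.analyze(tree)  # normalize the parsed tree
    tree = revsetlang.optimize(tree)  # rewrite into a cheaper equivalent form
    return revset.makematcher(tree)(repo)  # evaluate against the repository

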
3189 @command(
3195 @command(
3190 b'debugserve',
3196 b'debugserve',
3191 [
3197 [
3192 (
3198 (
3193 b'',
3199 b'',
3194 b'sshstdio',
3200 b'sshstdio',
3195 False,
3201 False,
3196 _(b'run an SSH server bound to process handles'),
3202 _(b'run an SSH server bound to process handles'),
3197 ),
3203 ),
3198 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3204 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3199 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3205 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3200 ],
3206 ],
3201 b'',
3207 b'',
3202 )
3208 )
3203 def debugserve(ui, repo, **opts):
3209 def debugserve(ui, repo, **opts):
3204 """run a server with advanced settings
3210 """run a server with advanced settings
3205
3211
3206 This command is similar to :hg:`serve`. It exists partially as a
3212 This command is similar to :hg:`serve`. It exists partially as a
3207 workaround to the fact that ``hg serve --stdio`` must have specific
3213 workaround to the fact that ``hg serve --stdio`` must have specific
3208 arguments for security reasons.
3214 arguments for security reasons.
3209 """
3215 """
3210 opts = pycompat.byteskwargs(opts)
3216 opts = pycompat.byteskwargs(opts)
3211
3217
3212 if not opts[b'sshstdio']:
3218 if not opts[b'sshstdio']:
3213 raise error.Abort(_(b'only --sshstdio is currently supported'))
3219 raise error.Abort(_(b'only --sshstdio is currently supported'))
3214
3220
3215 logfh = None
3221 logfh = None
3216
3222
3217 if opts[b'logiofd'] and opts[b'logiofile']:
3223 if opts[b'logiofd'] and opts[b'logiofile']:
3218 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3224 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3219
3225
3220 if opts[b'logiofd']:
3226 if opts[b'logiofd']:
3221 # Ideally we would be line buffered. But line buffering in binary
3227 # Ideally we would be line buffered. But line buffering in binary
3222 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3228 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3223 # buffering could have performance impacts. But since this isn't
3229 # buffering could have performance impacts. But since this isn't
3224 # performance critical code, it should be fine.
3230 # performance critical code, it should be fine.
3225 try:
3231 try:
3226 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3232 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3227 except OSError as e:
3233 except OSError as e:
3228 if e.errno != errno.ESPIPE:
3234 if e.errno != errno.ESPIPE:
3229 raise
3235 raise
3230 # can't seek a pipe, so `ab` mode fails on py3
3236 # can't seek a pipe, so `ab` mode fails on py3
3231 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3237 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3232 elif opts[b'logiofile']:
3238 elif opts[b'logiofile']:
3233 logfh = open(opts[b'logiofile'], b'ab', 0)
3239 logfh = open(opts[b'logiofile'], b'ab', 0)
3234
3240
3235 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3241 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3236 s.serve_forever()
3242 s.serve_forever()
3237
3243
3238
3244
3239 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3245 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3240 def debugsetparents(ui, repo, rev1, rev2=None):
3246 def debugsetparents(ui, repo, rev1, rev2=None):
3241 """manually set the parents of the current working directory
3247 """manually set the parents of the current working directory
3242
3248
3243 This is useful for writing repository conversion tools, but should
3249 This is useful for writing repository conversion tools, but should
3244 be used with care. For example, neither the working directory nor the
3250 be used with care. For example, neither the working directory nor the
3245 dirstate is updated, so file status may be incorrect after running this
3251 dirstate is updated, so file status may be incorrect after running this
3246 command.
3252 command.
3247
3253
3248 Returns 0 on success.
3254 Returns 0 on success.
3249 """
3255 """
3250
3256
3251 node1 = scmutil.revsingle(repo, rev1).node()
3257 node1 = scmutil.revsingle(repo, rev1).node()
3252 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3258 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3253
3259
3254 with repo.wlock():
3260 with repo.wlock():
3255 repo.setparents(node1, node2)
3261 repo.setparents(node1, node2)
3256
3262
3257
3263
3258 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3264 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3259 def debugsidedata(ui, repo, file_, rev=None, **opts):
3265 def debugsidedata(ui, repo, file_, rev=None, **opts):
3260 """dump the side data for a cl/manifest/file revision
3266 """dump the side data for a cl/manifest/file revision
3261
3267
3262 Use --verbose to dump the sidedata content."""
3268 Use --verbose to dump the sidedata content."""
3263 opts = pycompat.byteskwargs(opts)
3269 opts = pycompat.byteskwargs(opts)
3264 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3270 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3265 if rev is not None:
3271 if rev is not None:
3266 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3272 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3267 file_, rev = None, file_
3273 file_, rev = None, file_
3268 elif rev is None:
3274 elif rev is None:
3269 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3275 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3270 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3276 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3271 r = getattr(r, '_revlog', r)
3277 r = getattr(r, '_revlog', r)
3272 try:
3278 try:
3273 sidedata = r.sidedata(r.lookup(rev))
3279 sidedata = r.sidedata(r.lookup(rev))
3274 except KeyError:
3280 except KeyError:
3275 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3281 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3276 if sidedata:
3282 if sidedata:
3277 sidedata = list(sidedata.items())
3283 sidedata = list(sidedata.items())
3278 sidedata.sort()
3284 sidedata.sort()
3279 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3285 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3280 for key, value in sidedata:
3286 for key, value in sidedata:
3281 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3287 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3282 if ui.verbose:
3288 if ui.verbose:
3283 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3289 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3284
3290
3285
3291
3286 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3292 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3287 def debugssl(ui, repo, source=None, **opts):
3293 def debugssl(ui, repo, source=None, **opts):
3288 '''test a secure connection to a server
3294 '''test a secure connection to a server
3289
3295
3290 This builds the certificate chain for the server on Windows, installing the
3296 This builds the certificate chain for the server on Windows, installing the
3291 missing intermediates and trusted root via Windows Update if necessary. It
3297 missing intermediates and trusted root via Windows Update if necessary. It
3292 does nothing on other platforms.
3298 does nothing on other platforms.
3293
3299
3294 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3300 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3295 that server is used. See :hg:`help urls` for more information.
3301 that server is used. See :hg:`help urls` for more information.
3296
3302
3297 If the update succeeds, retry the original operation. Otherwise, the cause
3303 If the update succeeds, retry the original operation. Otherwise, the cause
3298 of the SSL error is likely another issue.
3304 of the SSL error is likely another issue.
3299 '''
3305 '''
3300 if not pycompat.iswindows:
3306 if not pycompat.iswindows:
3301 raise error.Abort(
3307 raise error.Abort(
3302 _(b'certificate chain building is only possible on Windows')
3308 _(b'certificate chain building is only possible on Windows')
3303 )
3309 )
3304
3310
3305 if not source:
3311 if not source:
3306 if not repo:
3312 if not repo:
3307 raise error.Abort(
3313 raise error.Abort(
3308 _(
3314 _(
3309 b"there is no Mercurial repository here, and no "
3315 b"there is no Mercurial repository here, and no "
3310 b"server specified"
3316 b"server specified"
3311 )
3317 )
3312 )
3318 )
3313 source = b"default"
3319 source = b"default"
3314
3320
3315 source, branches = hg.parseurl(ui.expandpath(source))
3321 source, branches = hg.parseurl(ui.expandpath(source))
3316 url = util.url(source)
3322 url = util.url(source)
3317
3323
3318 defaultport = {b'https': 443, b'ssh': 22}
3324 defaultport = {b'https': 443, b'ssh': 22}
3319 if url.scheme in defaultport:
3325 if url.scheme in defaultport:
3320 try:
3326 try:
3321 addr = (url.host, int(url.port or defaultport[url.scheme]))
3327 addr = (url.host, int(url.port or defaultport[url.scheme]))
3322 except ValueError:
3328 except ValueError:
3323 raise error.Abort(_(b"malformed port number in URL"))
3329 raise error.Abort(_(b"malformed port number in URL"))
3324 else:
3330 else:
3325 raise error.Abort(_(b"only https and ssh connections are supported"))
3331 raise error.Abort(_(b"only https and ssh connections are supported"))
3326
3332
3327 from . import win32
3333 from . import win32
3328
3334
3329 s = ssl.wrap_socket(
3335 s = ssl.wrap_socket(
3330 socket.socket(),
3336 socket.socket(),
3331 ssl_version=ssl.PROTOCOL_TLS,
3337 ssl_version=ssl.PROTOCOL_TLS,
3332 cert_reqs=ssl.CERT_NONE,
3338 cert_reqs=ssl.CERT_NONE,
3333 ca_certs=None,
3339 ca_certs=None,
3334 )
3340 )
3335
3341
3336 try:
3342 try:
3337 s.connect(addr)
3343 s.connect(addr)
3338 cert = s.getpeercert(True)
3344 cert = s.getpeercert(True)
3339
3345
3340 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3346 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3341
3347
3342 complete = win32.checkcertificatechain(cert, build=False)
3348 complete = win32.checkcertificatechain(cert, build=False)
3343
3349
3344 if not complete:
3350 if not complete:
3345 ui.status(_(b'certificate chain is incomplete, updating... '))
3351 ui.status(_(b'certificate chain is incomplete, updating... '))
3346
3352
3347 if not win32.checkcertificatechain(cert):
3353 if not win32.checkcertificatechain(cert):
3348 ui.status(_(b'failed.\n'))
3354 ui.status(_(b'failed.\n'))
3349 else:
3355 else:
3350 ui.status(_(b'done.\n'))
3356 ui.status(_(b'done.\n'))
3351 else:
3357 else:
3352 ui.status(_(b'full certificate chain is available\n'))
3358 ui.status(_(b'full certificate chain is available\n'))
3353 finally:
3359 finally:
3354 s.close()
3360 s.close()
3355
3361
3356
3362
3357 @command(
3363 @command(
3358 b'debugsub',
3364 b'debugsub',
3359 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3365 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3360 _(b'[-r REV] [REV]'),
3366 _(b'[-r REV] [REV]'),
3361 )
3367 )
3362 def debugsub(ui, repo, rev=None):
3368 def debugsub(ui, repo, rev=None):
3363 ctx = scmutil.revsingle(repo, rev, None)
3369 ctx = scmutil.revsingle(repo, rev, None)
3364 for k, v in sorted(ctx.substate.items()):
3370 for k, v in sorted(ctx.substate.items()):
3365 ui.writenoi18n(b'path %s\n' % k)
3371 ui.writenoi18n(b'path %s\n' % k)
3366 ui.writenoi18n(b' source %s\n' % v[0])
3372 ui.writenoi18n(b' source %s\n' % v[0])
3367 ui.writenoi18n(b' revision %s\n' % v[1])
3373 ui.writenoi18n(b' revision %s\n' % v[1])
3368
3374
3369
3375
3370 @command(
3376 @command(
3371 b'debugsuccessorssets',
3377 b'debugsuccessorssets',
3372 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3378 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3373 _(b'[REV]'),
3379 _(b'[REV]'),
3374 )
3380 )
3375 def debugsuccessorssets(ui, repo, *revs, **opts):
3381 def debugsuccessorssets(ui, repo, *revs, **opts):
3376 """show set of successors for revision
3382 """show set of successors for revision
3377
3383
3378 A successors set of changeset A is a consistent group of revisions that
3384 A successors set of changeset A is a consistent group of revisions that
3379 succeed A. It contains non-obsolete changesets only, unless the closest
3385 succeed A. It contains non-obsolete changesets only, unless the closest
3380 successors set is requested.
3386 successors set is requested.
3381
3387
3382 In most cases a changeset A has a single successors set containing a single
3388 In most cases a changeset A has a single successors set containing a single
3383 successor (changeset A replaced by A').
3389 successor (changeset A replaced by A').
3384
3390
3385 A changeset that is made obsolete with no successors is called "pruned".
3391 A changeset that is made obsolete with no successors is called "pruned".
3386 Such changesets have no successors sets at all.
3392 Such changesets have no successors sets at all.
3387
3393
3388 A changeset that has been "split" will have a successors set containing
3394 A changeset that has been "split" will have a successors set containing
3389 more than one successor.
3395 more than one successor.
3390
3396
3391 A changeset that has been rewritten in multiple different ways is called
3397 A changeset that has been rewritten in multiple different ways is called
3392 "divergent". Such changesets have multiple successor sets (each of which
3398 "divergent". Such changesets have multiple successor sets (each of which
3393 may also be split, i.e. have multiple successors).
3399 may also be split, i.e. have multiple successors).
3394
3400
3395 Results are displayed as follows::
3401 Results are displayed as follows::
3396
3402
3397 <rev1>
3403 <rev1>
3398 <successors-1A>
3404 <successors-1A>
3399 <rev2>
3405 <rev2>
3400 <successors-2A>
3406 <successors-2A>
3401 <successors-2B1> <successors-2B2> <successors-2B3>
3407 <successors-2B1> <successors-2B2> <successors-2B3>
3402
3408
3403 Here rev2 has two possible (i.e. divergent) successors sets. The first
3409 Here rev2 has two possible (i.e. divergent) successors sets. The first
3404 holds one element, whereas the second holds three (i.e. the changeset has
3410 holds one element, whereas the second holds three (i.e. the changeset has
3405 been split).
3411 been split).
3406 """
3412 """
3407 # passed to successorssets caching computation from one call to another
3413 # passed to successorssets caching computation from one call to another
3408 cache = {}
3414 cache = {}
3409 ctx2str = bytes
3415 ctx2str = bytes
3410 node2str = short
3416 node2str = short
3411 for rev in scmutil.revrange(repo, revs):
3417 for rev in scmutil.revrange(repo, revs):
3412 ctx = repo[rev]
3418 ctx = repo[rev]
3413 ui.write(b'%s\n' % ctx2str(ctx))
3419 ui.write(b'%s\n' % ctx2str(ctx))
3414 for succsset in obsutil.successorssets(
3420 for succsset in obsutil.successorssets(
3415 repo, ctx.node(), closest=opts['closest'], cache=cache
3421 repo, ctx.node(), closest=opts['closest'], cache=cache
3416 ):
3422 ):
3417 if succsset:
3423 if succsset:
3418 ui.write(b' ')
3424 ui.write(b' ')
3419 ui.write(node2str(succsset[0]))
3425 ui.write(node2str(succsset[0]))
3420 for node in succsset[1:]:
3426 for node in succsset[1:]:
3421 ui.write(b' ')
3427 ui.write(b' ')
3422 ui.write(node2str(node))
3428 ui.write(node2str(node))
3423 ui.write(b'\n')
3429 ui.write(b'\n')
3424
3430
3425
3431
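# A minimal sketch (assuming a repository with obsolescence markers) of the
# obsutil.successorssets() call that debugsuccessorssets() above formats for
# display. The helper name is illustrative and not part of any API.
def _example_successorssets(repo, rev):
    cache = {}  # shared across calls to avoid recomputing from markers
    ctx = repo[rev]
    # Returns a list of successors sets, each a list of successor nodes; an
    # empty list means the changeset was pruned.
    return obsutil.successorssets(repo, ctx.node(), closest=False, cache=cache)

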
3426 @command(
3432 @command(
3427 b'debugtemplate',
3433 b'debugtemplate',
3428 [
3434 [
3429 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3435 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3430 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3436 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3431 ],
3437 ],
3432 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3438 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3433 optionalrepo=True,
3439 optionalrepo=True,
3434 )
3440 )
3435 def debugtemplate(ui, repo, tmpl, **opts):
3441 def debugtemplate(ui, repo, tmpl, **opts):
3436 """parse and apply a template
3442 """parse and apply a template
3437
3443
3438 If -r/--rev is given, the template is processed as a log template and
3444 If -r/--rev is given, the template is processed as a log template and
3439 applied to the given changesets. Otherwise, it is processed as a generic
3445 applied to the given changesets. Otherwise, it is processed as a generic
3440 template.
3446 template.
3441
3447
3442 Use --verbose to print the parsed tree.
3448 Use --verbose to print the parsed tree.
3443 """
3449 """
3444 revs = None
3450 revs = None
3445 if opts['rev']:
3451 if opts['rev']:
3446 if repo is None:
3452 if repo is None:
3447 raise error.RepoError(
3453 raise error.RepoError(
3448 _(b'there is no Mercurial repository here (.hg not found)')
3454 _(b'there is no Mercurial repository here (.hg not found)')
3449 )
3455 )
3450 revs = scmutil.revrange(repo, opts['rev'])
3456 revs = scmutil.revrange(repo, opts['rev'])
3451
3457
3452 props = {}
3458 props = {}
3453 for d in opts['define']:
3459 for d in opts['define']:
3454 try:
3460 try:
3455 k, v = (e.strip() for e in d.split(b'=', 1))
3461 k, v = (e.strip() for e in d.split(b'=', 1))
3456 if not k or k == b'ui':
3462 if not k or k == b'ui':
3457 raise ValueError
3463 raise ValueError
3458 props[k] = v
3464 props[k] = v
3459 except ValueError:
3465 except ValueError:
3460 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3466 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3461
3467
3462 if ui.verbose:
3468 if ui.verbose:
3463 aliases = ui.configitems(b'templatealias')
3469 aliases = ui.configitems(b'templatealias')
3464 tree = templater.parse(tmpl)
3470 tree = templater.parse(tmpl)
3465 ui.note(templater.prettyformat(tree), b'\n')
3471 ui.note(templater.prettyformat(tree), b'\n')
3466 newtree = templater.expandaliases(tree, aliases)
3472 newtree = templater.expandaliases(tree, aliases)
3467 if newtree != tree:
3473 if newtree != tree:
3468 ui.notenoi18n(
3474 ui.notenoi18n(
3469 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3475 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3470 )
3476 )
3471
3477
3472 if revs is None:
3478 if revs is None:
3473 tres = formatter.templateresources(ui, repo)
3479 tres = formatter.templateresources(ui, repo)
3474 t = formatter.maketemplater(ui, tmpl, resources=tres)
3480 t = formatter.maketemplater(ui, tmpl, resources=tres)
3475 if ui.verbose:
3481 if ui.verbose:
3476 kwds, funcs = t.symbolsuseddefault()
3482 kwds, funcs = t.symbolsuseddefault()
3477 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3483 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3478 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3484 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3479 ui.write(t.renderdefault(props))
3485 ui.write(t.renderdefault(props))
3480 else:
3486 else:
3481 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3487 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3482 if ui.verbose:
3488 if ui.verbose:
3483 kwds, funcs = displayer.t.symbolsuseddefault()
3489 kwds, funcs = displayer.t.symbolsuseddefault()
3484 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3490 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3485 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3491 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3486 for r in revs:
3492 for r in revs:
3487 displayer.show(repo[r], **pycompat.strkwargs(props))
3493 displayer.show(repo[r], **pycompat.strkwargs(props))
3488 displayer.close()
3494 displayer.close()
3489
3495
3490
3496
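# Illustrative sketch only: parsing a log template the same way debugtemplate()
# above does in --verbose mode. The template string is an arbitrary example.
def _example_parse_template():
    tree = templater.parse(b'{rev}:{node|short} {desc|firstline}\n')
    return templater.prettyformat(tree)  # human-readable parsed tree

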
3491 @command(
3497 @command(
3492 b'debuguigetpass',
3498 b'debuguigetpass',
3493 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3499 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3494 _(b'[-p TEXT]'),
3500 _(b'[-p TEXT]'),
3495 norepo=True,
3501 norepo=True,
3496 )
3502 )
3497 def debuguigetpass(ui, prompt=b''):
3503 def debuguigetpass(ui, prompt=b''):
3498 """show prompt to type password"""
3504 """show prompt to type password"""
3499 r = ui.getpass(prompt)
3505 r = ui.getpass(prompt)
3500 ui.writenoi18n(b'response: %s\n' % r)
3506 ui.writenoi18n(b'response: %s\n' % r)
3501
3507
3502
3508
3503 @command(
3509 @command(
3504 b'debuguiprompt',
3510 b'debuguiprompt',
3505 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3511 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3506 _(b'[-p TEXT]'),
3512 _(b'[-p TEXT]'),
3507 norepo=True,
3513 norepo=True,
3508 )
3514 )
3509 def debuguiprompt(ui, prompt=b''):
3515 def debuguiprompt(ui, prompt=b''):
3510 """show plain prompt"""
3516 """show plain prompt"""
3511 r = ui.prompt(prompt)
3517 r = ui.prompt(prompt)
3512 ui.writenoi18n(b'response: %s\n' % r)
3518 ui.writenoi18n(b'response: %s\n' % r)
3513
3519
3514
3520
3515 @command(b'debugupdatecaches', [])
3521 @command(b'debugupdatecaches', [])
3516 def debugupdatecaches(ui, repo, *pats, **opts):
3522 def debugupdatecaches(ui, repo, *pats, **opts):
3517 """warm all known caches in the repository"""
3523 """warm all known caches in the repository"""
3518 with repo.wlock(), repo.lock():
3524 with repo.wlock(), repo.lock():
3519 repo.updatecaches(full=True)
3525 repo.updatecaches(full=True)
3520
3526
3521
3527
3522 @command(
3528 @command(
3523 b'debugupgraderepo',
3529 b'debugupgraderepo',
3524 [
3530 [
3525 (
3531 (
3526 b'o',
3532 b'o',
3527 b'optimize',
3533 b'optimize',
3528 [],
3534 [],
3529 _(b'extra optimization to perform'),
3535 _(b'extra optimization to perform'),
3530 _(b'NAME'),
3536 _(b'NAME'),
3531 ),
3537 ),
3532 (b'', b'run', False, _(b'performs an upgrade')),
3538 (b'', b'run', False, _(b'performs an upgrade')),
3533 (b'', b'backup', True, _(b'keep the old repository content around')),
3539 (b'', b'backup', True, _(b'keep the old repository content around')),
3534 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3540 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3535 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3541 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3536 ],
3542 ],
3537 )
3543 )
3538 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3544 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3539 """upgrade a repository to use different features
3545 """upgrade a repository to use different features
3540
3546
3541 If no arguments are specified, the repository is evaluated for upgrade
3547 If no arguments are specified, the repository is evaluated for upgrade
3542 and a list of problems and potential optimizations is printed.
3548 and a list of problems and potential optimizations is printed.
3543
3549
3544 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3550 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3545 can be influenced via additional arguments. More details will be provided
3551 can be influenced via additional arguments. More details will be provided
3546 by the command output when run without ``--run``.
3552 by the command output when run without ``--run``.
3547
3553
3548 During the upgrade, the repository will be locked and no writes will be
3554 During the upgrade, the repository will be locked and no writes will be
3549 allowed.
3555 allowed.
3550
3556
3551 At the end of the upgrade, the repository may not be readable while new
3557 At the end of the upgrade, the repository may not be readable while new
3552 repository data is swapped in. This window will be as long as it takes to
3558 repository data is swapped in. This window will be as long as it takes to
3553 rename some directories inside the ``.hg`` directory. On most machines, this
3559 rename some directories inside the ``.hg`` directory. On most machines, this
3554 should complete almost instantaneously and the chances of a consumer being
3560 should complete almost instantaneously and the chances of a consumer being
3555 unable to access the repository should be low.
3561 unable to access the repository should be low.
3556
3562
3557 By default, all revlogs will be upgraded. You can restrict this using flags
3563 By default, all revlogs will be upgraded. You can restrict this using flags
3558 such as `--manifest`:
3564 such as `--manifest`:
3559
3565
3560 * `--manifest`: only optimize the manifest
3566 * `--manifest`: only optimize the manifest
3561 * `--no-manifest`: optimize all revlog but the manifest
3567 * `--no-manifest`: optimize all revlog but the manifest
3562 * `--changelog`: optimize the changelog only
3568 * `--changelog`: optimize the changelog only
3563 * `--no-changelog --no-manifest`: optimize filelogs only
3569 * `--no-changelog --no-manifest`: optimize filelogs only
3564 """
3570 """
3565 return upgrade.upgraderepo(
3571 return upgrade.upgraderepo(
3566 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3572 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3567 )
3573 )
3568
3574
3569
3575
3570 @command(
3576 @command(
3571 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3577 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3572 )
3578 )
3573 def debugwalk(ui, repo, *pats, **opts):
3579 def debugwalk(ui, repo, *pats, **opts):
3574 """show how files match on given patterns"""
3580 """show how files match on given patterns"""
3575 opts = pycompat.byteskwargs(opts)
3581 opts = pycompat.byteskwargs(opts)
3576 m = scmutil.match(repo[None], pats, opts)
3582 m = scmutil.match(repo[None], pats, opts)
3577 if ui.verbose:
3583 if ui.verbose:
3578 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3584 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3579 items = list(repo[None].walk(m))
3585 items = list(repo[None].walk(m))
3580 if not items:
3586 if not items:
3581 return
3587 return
3582 f = lambda fn: fn
3588 f = lambda fn: fn
3583 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3589 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3584 f = lambda fn: util.normpath(fn)
3590 f = lambda fn: util.normpath(fn)
3585 fmt = b'f %%-%ds %%-%ds %%s' % (
3591 fmt = b'f %%-%ds %%-%ds %%s' % (
3586 max([len(abs) for abs in items]),
3592 max([len(abs) for abs in items]),
3587 max([len(repo.pathto(abs)) for abs in items]),
3593 max([len(repo.pathto(abs)) for abs in items]),
3588 )
3594 )
3589 for abs in items:
3595 for abs in items:
3590 line = fmt % (
3596 line = fmt % (
3591 abs,
3597 abs,
3592 f(repo.pathto(abs)),
3598 f(repo.pathto(abs)),
3593 m.exact(abs) and b'exact' or b'',
3599 m.exact(abs) and b'exact' or b'',
3594 )
3600 )
3595 ui.write(b"%s\n" % line.rstrip())
3601 ui.write(b"%s\n" % line.rstrip())
3596
3602
3597
3603
3598 @command(b'debugwhyunstable', [], _(b'REV'))
3604 @command(b'debugwhyunstable', [], _(b'REV'))
3599 def debugwhyunstable(ui, repo, rev):
3605 def debugwhyunstable(ui, repo, rev):
3600 """explain instabilities of a changeset"""
3606 """explain instabilities of a changeset"""
3601 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3607 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3602 dnodes = b''
3608 dnodes = b''
3603 if entry.get(b'divergentnodes'):
3609 if entry.get(b'divergentnodes'):
3604 dnodes = (
3610 dnodes = (
3605 b' '.join(
3611 b' '.join(
3606 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3612 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3607 for ctx in entry[b'divergentnodes']
3613 for ctx in entry[b'divergentnodes']
3608 )
3614 )
3609 + b' '
3615 + b' '
3610 )
3616 )
3611 ui.write(
3617 ui.write(
3612 b'%s: %s%s %s\n'
3618 b'%s: %s%s %s\n'
3613 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3619 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3614 )
3620 )
3615
3621
3616
3622
3617 @command(
3623 @command(
3618 b'debugwireargs',
3624 b'debugwireargs',
3619 [
3625 [
3620 (b'', b'three', b'', b'three'),
3626 (b'', b'three', b'', b'three'),
3621 (b'', b'four', b'', b'four'),
3627 (b'', b'four', b'', b'four'),
3622 (b'', b'five', b'', b'five'),
3628 (b'', b'five', b'', b'five'),
3623 ]
3629 ]
3624 + cmdutil.remoteopts,
3630 + cmdutil.remoteopts,
3625 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3631 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3626 norepo=True,
3632 norepo=True,
3627 )
3633 )
3628 def debugwireargs(ui, repopath, *vals, **opts):
3634 def debugwireargs(ui, repopath, *vals, **opts):
3629 opts = pycompat.byteskwargs(opts)
3635 opts = pycompat.byteskwargs(opts)
3630 repo = hg.peer(ui, opts, repopath)
3636 repo = hg.peer(ui, opts, repopath)
3631 for opt in cmdutil.remoteopts:
3637 for opt in cmdutil.remoteopts:
3632 del opts[opt[1]]
3638 del opts[opt[1]]
3633 args = {}
3639 args = {}
3634 for k, v in pycompat.iteritems(opts):
3640 for k, v in pycompat.iteritems(opts):
3635 if v:
3641 if v:
3636 args[k] = v
3642 args[k] = v
3637 args = pycompat.strkwargs(args)
3643 args = pycompat.strkwargs(args)
3638 # run twice to check that we don't mess up the stream for the next command
3644 # run twice to check that we don't mess up the stream for the next command
3639 res1 = repo.debugwireargs(*vals, **args)
3645 res1 = repo.debugwireargs(*vals, **args)
3640 res2 = repo.debugwireargs(*vals, **args)
3646 res2 = repo.debugwireargs(*vals, **args)
3641 ui.write(b"%s\n" % res1)
3647 ui.write(b"%s\n" % res1)
3642 if res1 != res2:
3648 if res1 != res2:
3643 ui.warn(b"%s\n" % res2)
3649 ui.warn(b"%s\n" % res2)
3644
3650
3645
3651
3646 def _parsewirelangblocks(fh):
3652 def _parsewirelangblocks(fh):
3647 activeaction = None
3653 activeaction = None
3648 blocklines = []
3654 blocklines = []
3649 lastindent = 0
3655 lastindent = 0
3650
3656
3651 for line in fh:
3657 for line in fh:
3652 line = line.rstrip()
3658 line = line.rstrip()
3653 if not line:
3659 if not line:
3654 continue
3660 continue
3655
3661
3656 if line.startswith(b'#'):
3662 if line.startswith(b'#'):
3657 continue
3663 continue
3658
3664
3659 if not line.startswith(b' '):
3665 if not line.startswith(b' '):
3660 # New block. Flush previous one.
3666 # New block. Flush previous one.
3661 if activeaction:
3667 if activeaction:
3662 yield activeaction, blocklines
3668 yield activeaction, blocklines
3663
3669
3664 activeaction = line
3670 activeaction = line
3665 blocklines = []
3671 blocklines = []
3666 lastindent = 0
3672 lastindent = 0
3667 continue
3673 continue
3668
3674
3669 # Else we start with an indent.
3675 # Else we start with an indent.
3670
3676
3671 if not activeaction:
3677 if not activeaction:
3672 raise error.Abort(_(b'indented line outside of block'))
3678 raise error.Abort(_(b'indented line outside of block'))
3673
3679
3674 indent = len(line) - len(line.lstrip())
3680 indent = len(line) - len(line.lstrip())
3675
3681
3676 # If this line is indented more than the last line, concatenate it.
3682 # If this line is indented more than the last line, concatenate it.
3677 if indent > lastindent and blocklines:
3683 if indent > lastindent and blocklines:
3678 blocklines[-1] += line.lstrip()
3684 blocklines[-1] += line.lstrip()
3679 else:
3685 else:
3680 blocklines.append(line)
3686 blocklines.append(line)
3681 lastindent = indent
3687 lastindent = indent
3682
3688
3683 # Flush last block.
3689 # Flush last block.
3684 if activeaction:
3690 if activeaction:
3685 yield activeaction, blocklines
3691 yield activeaction, blocklines
3686
3692
3687
3693
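# A minimal sketch (not invoked by any command) showing how the block language
# documented in the ``debugwireproto`` docstring below is tokenized by
# _parsewirelangblocks(). The sample script and helper name are illustrative.
def _example_parsewirelangblocks():
    import io

    script = io.BytesIO(
        b'# comment lines are skipped\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'readavailable\n'
    )
    # Yields (b'command listkeys', [b'    namespace bookmarks']) followed by
    # (b'readavailable', []).
    return list(_parsewirelangblocks(script))

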
3688 @command(
3694 @command(
3689 b'debugwireproto',
3695 b'debugwireproto',
3690 [
3696 [
3691 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3697 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3692 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3698 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3693 (
3699 (
3694 b'',
3700 b'',
3695 b'noreadstderr',
3701 b'noreadstderr',
3696 False,
3702 False,
3697 _(b'do not read from stderr of the remote'),
3703 _(b'do not read from stderr of the remote'),
3698 ),
3704 ),
3699 (
3705 (
3700 b'',
3706 b'',
3701 b'nologhandshake',
3707 b'nologhandshake',
3702 False,
3708 False,
3703 _(b'do not log I/O related to the peer handshake'),
3709 _(b'do not log I/O related to the peer handshake'),
3704 ),
3710 ),
3705 ]
3711 ]
3706 + cmdutil.remoteopts,
3712 + cmdutil.remoteopts,
3707 _(b'[PATH]'),
3713 _(b'[PATH]'),
3708 optionalrepo=True,
3714 optionalrepo=True,
3709 )
3715 )
3710 def debugwireproto(ui, repo, path=None, **opts):
3716 def debugwireproto(ui, repo, path=None, **opts):
3711 """send wire protocol commands to a server
3717 """send wire protocol commands to a server
3712
3718
3713 This command can be used to issue wire protocol commands to remote
3719 This command can be used to issue wire protocol commands to remote
3714 peers and to debug the raw data being exchanged.
3720 peers and to debug the raw data being exchanged.
3715
3721
3716 ``--localssh`` will start an SSH server against the current repository
3722 ``--localssh`` will start an SSH server against the current repository
3717 and connect to that. By default, the connection will perform a handshake
3723 and connect to that. By default, the connection will perform a handshake
3718 and establish an appropriate peer instance.
3724 and establish an appropriate peer instance.
3719
3725
3720 ``--peer`` can be used to bypass the handshake protocol and construct a
3726 ``--peer`` can be used to bypass the handshake protocol and construct a
3721 peer instance using the specified class type. Valid values are ``raw``,
3727 peer instance using the specified class type. Valid values are ``raw``,
3722 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3728 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3723 raw data payloads and don't support higher-level command actions.
3729 raw data payloads and don't support higher-level command actions.
3724
3730
3725 ``--noreadstderr`` can be used to disable automatic reading from stderr
3731 ``--noreadstderr`` can be used to disable automatic reading from stderr
3726 of the peer (for SSH connections only). Disabling automatic reading of
3732 of the peer (for SSH connections only). Disabling automatic reading of
3727 stderr is useful for making output more deterministic.
3733 stderr is useful for making output more deterministic.
3728
3734
3729 Commands are issued via a mini language which is specified via stdin.
3735 Commands are issued via a mini language which is specified via stdin.
3730 The language consists of individual actions to perform. An action is
3736 The language consists of individual actions to perform. An action is
3731 defined by a block. A block is defined as a line with no leading
3737 defined by a block. A block is defined as a line with no leading
3732 space followed by 0 or more lines with leading space. Blocks are
3738 space followed by 0 or more lines with leading space. Blocks are
3733 effectively a high-level command with additional metadata.
3739 effectively a high-level command with additional metadata.
3734
3740
3735 Lines beginning with ``#`` are ignored.
3741 Lines beginning with ``#`` are ignored.
3736
3742
3737 The following sections denote available actions.
3743 The following sections denote available actions.
3738
3744
3739 raw
3745 raw
3740 ---
3746 ---
3741
3747
3742 Send raw data to the server.
3748 Send raw data to the server.
3743
3749
3744 The block payload contains the raw data to send as one atomic send
3750 The block payload contains the raw data to send as one atomic send
3745 operation. The data may not actually be delivered in a single system
3751 operation. The data may not actually be delivered in a single system
3746 call: it depends on the abilities of the transport being used.
3752 call: it depends on the abilities of the transport being used.
3747
3753
3748 Each line in the block is de-indented and concatenated. Then, that
3754 Each line in the block is de-indented and concatenated. Then, that
3749 value is evaluated as a Python b'' literal. This allows the use of
3755 value is evaluated as a Python b'' literal. This allows the use of
3750 backslash escaping, etc.
3756 backslash escaping, etc.
3751
3757
3752 raw+
3758 raw+
3753 ----
3759 ----
3754
3760
3755 Behaves like ``raw`` except flushes output afterwards.
3761 Behaves like ``raw`` except flushes output afterwards.
3756
3762
3757 command <X>
3763 command <X>
3758 -----------
3764 -----------
3759
3765
3760 Send a request to run a named command, whose name follows the ``command``
3766 Send a request to run a named command, whose name follows the ``command``
3761 string.
3767 string.
3762
3768
3763 Arguments to the command are defined as lines in this block. The format of
3769 Arguments to the command are defined as lines in this block. The format of
3764 each line is ``<key> <value>``. e.g.::
3770 each line is ``<key> <value>``. e.g.::
3765
3771
3766 command listkeys
3772 command listkeys
3767 namespace bookmarks
3773 namespace bookmarks
3768
3774
3769 If the value begins with ``eval:``, it will be interpreted as a Python
3775 If the value begins with ``eval:``, it will be interpreted as a Python
3770 literal expression. Otherwise values are interpreted as Python b'' literals.
3776 literal expression. Otherwise values are interpreted as Python b'' literals.
3771 This allows sending complex types and encoding special byte sequences via
3777 This allows sending complex types and encoding special byte sequences via
3772 backslash escaping.
3778 backslash escaping.
3773
3779
3774 The following arguments have special meaning:
3780 The following arguments have special meaning:
3775
3781
3776 ``PUSHFILE``
3782 ``PUSHFILE``
3777 When defined, the *push* mechanism of the peer will be used instead
3783 When defined, the *push* mechanism of the peer will be used instead
3778 of the static request-response mechanism and the content of the
3784 of the static request-response mechanism and the content of the
3779 file specified in the value of this argument will be sent as the
3785 file specified in the value of this argument will be sent as the
3780 command payload.
3786 command payload.
3781
3787
3782 This can be used to submit a local bundle file to the remote.
3788 This can be used to submit a local bundle file to the remote.
3783
3789
3784 batchbegin
3790 batchbegin
3785 ----------
3791 ----------
3786
3792
3787 Instruct the peer to begin a batched send.
3793 Instruct the peer to begin a batched send.
3788
3794
3789 All ``command`` blocks are queued for execution until the next
3795 All ``command`` blocks are queued for execution until the next
3790 ``batchsubmit`` block.
3796 ``batchsubmit`` block.
3791
3797
3792 batchsubmit
3798 batchsubmit
3793 -----------
3799 -----------
3794
3800
3795 Submit previously queued ``command`` blocks as a batch request.
3801 Submit previously queued ``command`` blocks as a batch request.
3796
3802
3797 This action MUST be paired with a ``batchbegin`` action.
3803 This action MUST be paired with a ``batchbegin`` action.
3798
3804
3799 httprequest <method> <path>
3805 httprequest <method> <path>
3800 ---------------------------
3806 ---------------------------
3801
3807
3802 (HTTP peer only)
3808 (HTTP peer only)
3803
3809
3804 Send an HTTP request to the peer.
3810 Send an HTTP request to the peer.
3805
3811
3806 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3812 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3807
3813
3808 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3814 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3809 headers to add to the request. e.g. ``Accept: foo``.
3815 headers to add to the request. e.g. ``Accept: foo``.
3810
3816
3811 The following arguments are special:
3817 The following arguments are special:
3812
3818
3813 ``BODYFILE``
3819 ``BODYFILE``
3814 The content of the file defined as the value to this argument will be
3820 The content of the file defined as the value to this argument will be
3815 transferred verbatim as the HTTP request body.
3821 transferred verbatim as the HTTP request body.
3816
3822
3817 ``frame <type> <flags> <payload>``
3823 ``frame <type> <flags> <payload>``
3818 Send a unified protocol frame as part of the request body.
3824 Send a unified protocol frame as part of the request body.
3819
3825
3820 All frames will be collected and sent as the body to the HTTP
3826 All frames will be collected and sent as the body to the HTTP
3821 request.
3827 request.
3822
3828
3823 close
3829 close
3824 -----
3830 -----
3825
3831
3826 Close the connection to the server.
3832 Close the connection to the server.
3827
3833
3828 flush
3834 flush
3829 -----
3835 -----
3830
3836
3831 Flush data written to the server.
3837 Flush data written to the server.
3832
3838
3833 readavailable
3839 readavailable
3834 -------------
3840 -------------
3835
3841
3836 Close the write end of the connection and read all available data from
3842 Close the write end of the connection and read all available data from
3837 the server.
3843 the server.
3838
3844
3839 If the connection to the server encompasses multiple pipes, we poll both
3845 If the connection to the server encompasses multiple pipes, we poll both
3840 pipes and read available data.
3846 pipes and read available data.
3841
3847
3842 readline
3848 readline
3843 --------
3849 --------
3844
3850
3845 Read a line of output from the server. If there are multiple output
3851 Read a line of output from the server. If there are multiple output
3846 pipes, reads only the main pipe.
3852 pipes, reads only the main pipe.
3847
3853
3848 ereadline
3854 ereadline
3849 ---------
3855 ---------
3850
3856
3851 Like ``readline``, but read from the stderr pipe, if available.
3857 Like ``readline``, but read from the stderr pipe, if available.
3852
3858
3853 read <X>
3859 read <X>
3854 --------
3860 --------
3855
3861
3856 ``read()`` N bytes from the server's main output pipe.
3862 ``read()`` N bytes from the server's main output pipe.
3857
3863
3858 eread <X>
3864 eread <X>
3859 ---------
3865 ---------
3860
3866
3861 ``read()`` N bytes from the server's stderr pipe, if available.
3867 ``read()`` N bytes from the server's stderr pipe, if available.
3862
3868
3863 Specifying Unified Frame-Based Protocol Frames
3869 Specifying Unified Frame-Based Protocol Frames
3864 ----------------------------------------------
3870 ----------------------------------------------
3865
3871
3866 It is possible to emit *Unified Frame-Based Protocol* frames by using
3872 It is possible to emit *Unified Frame-Based Protocol* frames by using
3867 special syntax.
3873 special syntax.
3868
3874
3869 A frame is composed as a type, flags, and payload. These can be parsed
3875 A frame is composed as a type, flags, and payload. These can be parsed
3870 from a string of the form:
3876 from a string of the form:
3871
3877
3872 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3878 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3873
3879
3874 ``request-id`` and ``stream-id`` are integers defining the request and
3880 ``request-id`` and ``stream-id`` are integers defining the request and
3875 stream identifiers.
3881 stream identifiers.
3876
3882
3877 ``type`` can be an integer value for the frame type or the string name
3883 ``type`` can be an integer value for the frame type or the string name
3878 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3884 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3879 ``command-name``.
3885 ``command-name``.
3880
3886
3881 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3887 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3882 components. Each component (and there can be just one) can be an integer
3888 components. Each component (and there can be just one) can be an integer
3883 or a flag name for stream flags or frame flags, respectively. Values are
3889 or a flag name for stream flags or frame flags, respectively. Values are
3884 resolved to integers and then bitwise OR'd together.
3890 resolved to integers and then bitwise OR'd together.
3885
3891
3886 ``payload`` represents the raw frame payload. If it begins with
3892 ``payload`` represents the raw frame payload. If it begins with
3887 ``cbor:``, the following string is evaluated as Python code and the
3893 ``cbor:``, the following string is evaluated as Python code and the
3888 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3894 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3889 as a Python byte string literal.
3895 as a Python byte string literal.
3890 """
3896 """
3891 opts = pycompat.byteskwargs(opts)
3897 opts = pycompat.byteskwargs(opts)
3892
3898
3893 if opts[b'localssh'] and not repo:
3899 if opts[b'localssh'] and not repo:
3894 raise error.Abort(_(b'--localssh requires a repository'))
3900 raise error.Abort(_(b'--localssh requires a repository'))
3895
3901
3896 if opts[b'peer'] and opts[b'peer'] not in (
3902 if opts[b'peer'] and opts[b'peer'] not in (
3897 b'raw',
3903 b'raw',
3898 b'http2',
3904 b'http2',
3899 b'ssh1',
3905 b'ssh1',
3900 b'ssh2',
3906 b'ssh2',
3901 ):
3907 ):
3902 raise error.Abort(
3908 raise error.Abort(
3903 _(b'invalid value for --peer'),
3909 _(b'invalid value for --peer'),
3904 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3910 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3905 )
3911 )
3906
3912
3907 if path and opts[b'localssh']:
3913 if path and opts[b'localssh']:
3908 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3914 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3909
3915
3910 if ui.interactive():
3916 if ui.interactive():
3911 ui.write(_(b'(waiting for commands on stdin)\n'))
3917 ui.write(_(b'(waiting for commands on stdin)\n'))
3912
3918
3913 blocks = list(_parsewirelangblocks(ui.fin))
3919 blocks = list(_parsewirelangblocks(ui.fin))
3914
3920
3915 proc = None
3921 proc = None
3916 stdin = None
3922 stdin = None
3917 stdout = None
3923 stdout = None
3918 stderr = None
3924 stderr = None
3919 opener = None
3925 opener = None
3920
3926
3921 if opts[b'localssh']:
3927 if opts[b'localssh']:
3922 # We start the SSH server in its own process so there is process
3928 # We start the SSH server in its own process so there is process
3923 # separation. This prevents a whole class of potential bugs around
3929 # separation. This prevents a whole class of potential bugs around
3924 # shared state from interfering with server operation.
3930 # shared state from interfering with server operation.
3925 args = procutil.hgcmd() + [
3931 args = procutil.hgcmd() + [
3926 b'-R',
3932 b'-R',
3927 repo.root,
3933 repo.root,
3928 b'debugserve',
3934 b'debugserve',
3929 b'--sshstdio',
3935 b'--sshstdio',
3930 ]
3936 ]
3931 proc = subprocess.Popen(
3937 proc = subprocess.Popen(
3932 pycompat.rapply(procutil.tonativestr, args),
3938 pycompat.rapply(procutil.tonativestr, args),
3933 stdin=subprocess.PIPE,
3939 stdin=subprocess.PIPE,
3934 stdout=subprocess.PIPE,
3940 stdout=subprocess.PIPE,
3935 stderr=subprocess.PIPE,
3941 stderr=subprocess.PIPE,
3936 bufsize=0,
3942 bufsize=0,
3937 )
3943 )
3938
3944
3939 stdin = proc.stdin
3945 stdin = proc.stdin
3940 stdout = proc.stdout
3946 stdout = proc.stdout
3941 stderr = proc.stderr
3947 stderr = proc.stderr
3942
3948
3943 # We turn the pipes into observers so we can log I/O.
3949 # We turn the pipes into observers so we can log I/O.
3944 if ui.verbose or opts[b'peer'] == b'raw':
3950 if ui.verbose or opts[b'peer'] == b'raw':
3945 stdin = util.makeloggingfileobject(
3951 stdin = util.makeloggingfileobject(
3946 ui, proc.stdin, b'i', logdata=True
3952 ui, proc.stdin, b'i', logdata=True
3947 )
3953 )
3948 stdout = util.makeloggingfileobject(
3954 stdout = util.makeloggingfileobject(
3949 ui, proc.stdout, b'o', logdata=True
3955 ui, proc.stdout, b'o', logdata=True
3950 )
3956 )
3951 stderr = util.makeloggingfileobject(
3957 stderr = util.makeloggingfileobject(
3952 ui, proc.stderr, b'e', logdata=True
3958 ui, proc.stderr, b'e', logdata=True
3953 )
3959 )
3954
3960
3955 # --localssh also implies the peer connection settings.
3961 # --localssh also implies the peer connection settings.
3956
3962
3957 url = b'ssh://localserver'
3963 url = b'ssh://localserver'
3958 autoreadstderr = not opts[b'noreadstderr']
3964 autoreadstderr = not opts[b'noreadstderr']
3959
3965
3960 if opts[b'peer'] == b'ssh1':
3966 if opts[b'peer'] == b'ssh1':
3961 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3967 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3962 peer = sshpeer.sshv1peer(
3968 peer = sshpeer.sshv1peer(
3963 ui,
3969 ui,
3964 url,
3970 url,
3965 proc,
3971 proc,
3966 stdin,
3972 stdin,
3967 stdout,
3973 stdout,
3968 stderr,
3974 stderr,
3969 None,
3975 None,
3970 autoreadstderr=autoreadstderr,
3976 autoreadstderr=autoreadstderr,
3971 )
3977 )
3972 elif opts[b'peer'] == b'ssh2':
3978 elif opts[b'peer'] == b'ssh2':
3973 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3979 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3974 peer = sshpeer.sshv2peer(
3980 peer = sshpeer.sshv2peer(
3975 ui,
3981 ui,
3976 url,
3982 url,
3977 proc,
3983 proc,
3978 stdin,
3984 stdin,
3979 stdout,
3985 stdout,
3980 stderr,
3986 stderr,
3981 None,
3987 None,
3982 autoreadstderr=autoreadstderr,
3988 autoreadstderr=autoreadstderr,
3983 )
3989 )
3984 elif opts[b'peer'] == b'raw':
3990 elif opts[b'peer'] == b'raw':
3985 ui.write(_(b'using raw connection to peer\n'))
3991 ui.write(_(b'using raw connection to peer\n'))
3986 peer = None
3992 peer = None
3987 else:
3993 else:
3988 ui.write(_(b'creating ssh peer from handshake results\n'))
3994 ui.write(_(b'creating ssh peer from handshake results\n'))
3989 peer = sshpeer.makepeer(
3995 peer = sshpeer.makepeer(
3990 ui,
3996 ui,
3991 url,
3997 url,
3992 proc,
3998 proc,
3993 stdin,
3999 stdin,
3994 stdout,
4000 stdout,
3995 stderr,
4001 stderr,
3996 autoreadstderr=autoreadstderr,
4002 autoreadstderr=autoreadstderr,
3997 )
4003 )
3998
4004
3999 elif path:
4005 elif path:
4000 # We bypass hg.peer() so we can proxy the sockets.
4006 # We bypass hg.peer() so we can proxy the sockets.
4001 # TODO consider not doing this because we skip
4007 # TODO consider not doing this because we skip
4002 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4008 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4003 u = util.url(path)
4009 u = util.url(path)
4004 if u.scheme != b'http':
4010 if u.scheme != b'http':
4005 raise error.Abort(_(b'only http:// paths are currently supported'))
4011 raise error.Abort(_(b'only http:// paths are currently supported'))
4006
4012
4007 url, authinfo = u.authinfo()
4013 url, authinfo = u.authinfo()
4008 openerargs = {
4014 openerargs = {
4009 'useragent': b'Mercurial debugwireproto',
4015 'useragent': b'Mercurial debugwireproto',
4010 }
4016 }
4011
4017
4012 # Turn pipes/sockets into observers so we can log I/O.
4018 # Turn pipes/sockets into observers so we can log I/O.
4013 if ui.verbose:
4019 if ui.verbose:
4014 openerargs.update(
4020 openerargs.update(
4015 {
4021 {
4016 'loggingfh': ui,
4022 'loggingfh': ui,
4017 'loggingname': b's',
4023 'loggingname': b's',
4018 'loggingopts': {'logdata': True, 'logdataapis': False,},
4024 'loggingopts': {'logdata': True, 'logdataapis': False,},
4019 }
4025 }
4020 )
4026 )
4021
4027
4022 if ui.debugflag:
4028 if ui.debugflag:
4023 openerargs['loggingopts']['logdataapis'] = True
4029 openerargs['loggingopts']['logdataapis'] = True
4024
4030
4025 # Don't send default headers when in raw mode. This allows us to
4031 # Don't send default headers when in raw mode. This allows us to
4026 # bypass most of the behavior of our URL handling code so we can
4032 # bypass most of the behavior of our URL handling code so we can
4027 # have near complete control over what's sent on the wire.
4033 # have near complete control over what's sent on the wire.
4028 if opts[b'peer'] == b'raw':
4034 if opts[b'peer'] == b'raw':
4029 openerargs['sendaccept'] = False
4035 openerargs['sendaccept'] = False
4030
4036
4031 opener = urlmod.opener(ui, authinfo, **openerargs)
4037 opener = urlmod.opener(ui, authinfo, **openerargs)
4032
4038
4033 if opts[b'peer'] == b'http2':
4039 if opts[b'peer'] == b'http2':
4034 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4040 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4035 # We go through makepeer() because we need an API descriptor for
4041 # We go through makepeer() because we need an API descriptor for
4036 # the peer instance to be useful.
4042 # the peer instance to be useful.
4037 with ui.configoverride(
4043 with ui.configoverride(
4038 {(b'experimental', b'httppeer.advertise-v2'): True}
4044 {(b'experimental', b'httppeer.advertise-v2'): True}
4039 ):
4045 ):
4040 if opts[b'nologhandshake']:
4046 if opts[b'nologhandshake']:
4041 ui.pushbuffer()
4047 ui.pushbuffer()
4042
4048
4043 peer = httppeer.makepeer(ui, path, opener=opener)
4049 peer = httppeer.makepeer(ui, path, opener=opener)
4044
4050
4045 if opts[b'nologhandshake']:
4051 if opts[b'nologhandshake']:
4046 ui.popbuffer()
4052 ui.popbuffer()
4047
4053
4048 if not isinstance(peer, httppeer.httpv2peer):
4054 if not isinstance(peer, httppeer.httpv2peer):
4049 raise error.Abort(
4055 raise error.Abort(
4050 _(
4056 _(
4051 b'could not instantiate HTTP peer for '
4057 b'could not instantiate HTTP peer for '
4052 b'wire protocol version 2'
4058 b'wire protocol version 2'
4053 ),
4059 ),
4054 hint=_(
4060 hint=_(
4055 b'the server may not have the feature '
4061 b'the server may not have the feature '
4056 b'enabled or is not allowing this '
4062 b'enabled or is not allowing this '
4057 b'client version'
4063 b'client version'
4058 ),
4064 ),
4059 )
4065 )
4060
4066
4061 elif opts[b'peer'] == b'raw':
4067 elif opts[b'peer'] == b'raw':
4062 ui.write(_(b'using raw connection to peer\n'))
4068 ui.write(_(b'using raw connection to peer\n'))
4063 peer = None
4069 peer = None
4064 elif opts[b'peer']:
4070 elif opts[b'peer']:
4065 raise error.Abort(
4071 raise error.Abort(
4066 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4072 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4067 )
4073 )
4068 else:
4074 else:
4069 peer = httppeer.makepeer(ui, path, opener=opener)
4075 peer = httppeer.makepeer(ui, path, opener=opener)
4070
4076
4071 # We /could/ populate stdin/stdout with sock.makefile()...
4077 # We /could/ populate stdin/stdout with sock.makefile()...
4072 else:
4078 else:
4073 raise error.Abort(_(b'unsupported connection configuration'))
4079 raise error.Abort(_(b'unsupported connection configuration'))
4074
4080
4075 batchedcommands = None
4081 batchedcommands = None
4076
4082
4077 # Now perform actions based on the parsed wire language instructions.
4083 # Now perform actions based on the parsed wire language instructions.
4078 for action, lines in blocks:
4084 for action, lines in blocks:
4079 if action in (b'raw', b'raw+'):
4085 if action in (b'raw', b'raw+'):
4080 if not stdin:
4086 if not stdin:
4081 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4087 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4082
4088
4083 # Concatenate the data together.
4089 # Concatenate the data together.
4084 data = b''.join(l.lstrip() for l in lines)
4090 data = b''.join(l.lstrip() for l in lines)
4085 data = stringutil.unescapestr(data)
4091 data = stringutil.unescapestr(data)
4086 stdin.write(data)
4092 stdin.write(data)
4087
4093
4088 if action == b'raw+':
4094 if action == b'raw+':
4089 stdin.flush()
4095 stdin.flush()
4090 elif action == b'flush':
4096 elif action == b'flush':
4091 if not stdin:
4097 if not stdin:
4092 raise error.Abort(_(b'cannot call flush on this peer'))
4098 raise error.Abort(_(b'cannot call flush on this peer'))
4093 stdin.flush()
4099 stdin.flush()
4094 elif action.startswith(b'command'):
4100 elif action.startswith(b'command'):
4095 if not peer:
4101 if not peer:
4096 raise error.Abort(
4102 raise error.Abort(
4097 _(
4103 _(
4098 b'cannot send commands unless peer instance '
4104 b'cannot send commands unless peer instance '
4099 b'is available'
4105 b'is available'
4100 )
4106 )
4101 )
4107 )
4102
4108
4103 command = action.split(b' ', 1)[1]
4109 command = action.split(b' ', 1)[1]
4104
4110
4105 args = {}
4111 args = {}
4106 for line in lines:
4112 for line in lines:
4107 # We need to allow empty values.
4113 # We need to allow empty values.
4108 fields = line.lstrip().split(b' ', 1)
4114 fields = line.lstrip().split(b' ', 1)
4109 if len(fields) == 1:
4115 if len(fields) == 1:
4110 key = fields[0]
4116 key = fields[0]
4111 value = b''
4117 value = b''
4112 else:
4118 else:
4113 key, value = fields
4119 key, value = fields
4114
4120
4115 if value.startswith(b'eval:'):
4121 if value.startswith(b'eval:'):
4116 value = stringutil.evalpythonliteral(value[5:])
4122 value = stringutil.evalpythonliteral(value[5:])
4117 else:
4123 else:
4118 value = stringutil.unescapestr(value)
4124 value = stringutil.unescapestr(value)
4119
4125
4120 args[key] = value
4126 args[key] = value
4121
4127
4122 if batchedcommands is not None:
4128 if batchedcommands is not None:
4123 batchedcommands.append((command, args))
4129 batchedcommands.append((command, args))
4124 continue
4130 continue
4125
4131
4126 ui.status(_(b'sending %s command\n') % command)
4132 ui.status(_(b'sending %s command\n') % command)
4127
4133
4128 if b'PUSHFILE' in args:
4134 if b'PUSHFILE' in args:
4129 with open(args[b'PUSHFILE'], 'rb') as fh:
4135 with open(args[b'PUSHFILE'], 'rb') as fh:
4130 del args[b'PUSHFILE']
4136 del args[b'PUSHFILE']
4131 res, output = peer._callpush(
4137 res, output = peer._callpush(
4132 command, fh, **pycompat.strkwargs(args)
4138 command, fh, **pycompat.strkwargs(args)
4133 )
4139 )
4134 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4140 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4135 ui.status(
4141 ui.status(
4136 _(b'remote output: %s\n') % stringutil.escapestr(output)
4142 _(b'remote output: %s\n') % stringutil.escapestr(output)
4137 )
4143 )
4138 else:
4144 else:
4139 with peer.commandexecutor() as e:
4145 with peer.commandexecutor() as e:
4140 res = e.callcommand(command, args).result()
4146 res = e.callcommand(command, args).result()
4141
4147
4142 if isinstance(res, wireprotov2peer.commandresponse):
4148 if isinstance(res, wireprotov2peer.commandresponse):
4143 val = res.objects()
4149 val = res.objects()
4144 ui.status(
4150 ui.status(
4145 _(b'response: %s\n')
4151 _(b'response: %s\n')
4146 % stringutil.pprint(val, bprefix=True, indent=2)
4152 % stringutil.pprint(val, bprefix=True, indent=2)
4147 )
4153 )
4148 else:
4154 else:
4149 ui.status(
4155 ui.status(
4150 _(b'response: %s\n')
4156 _(b'response: %s\n')
4151 % stringutil.pprint(res, bprefix=True, indent=2)
4157 % stringutil.pprint(res, bprefix=True, indent=2)
4152 )
4158 )
4153
4159
4154 elif action == b'batchbegin':
4160 elif action == b'batchbegin':
4155 if batchedcommands is not None:
4161 if batchedcommands is not None:
4156 raise error.Abort(_(b'nested batchbegin not allowed'))
4162 raise error.Abort(_(b'nested batchbegin not allowed'))
4157
4163
4158 batchedcommands = []
4164 batchedcommands = []
4159 elif action == b'batchsubmit':
4165 elif action == b'batchsubmit':
4160 # There is a batching API we could go through. But it would be
4166 # There is a batching API we could go through. But it would be
4161 # difficult to normalize requests into function calls. It is easier
4167 # difficult to normalize requests into function calls. It is easier
4162 # to bypass this layer and normalize to commands + args.
4168 # to bypass this layer and normalize to commands + args.
4163 ui.status(
4169 ui.status(
4164 _(b'sending batch with %d sub-commands\n')
4170 _(b'sending batch with %d sub-commands\n')
4165 % len(batchedcommands)
4171 % len(batchedcommands)
4166 )
4172 )
4167 assert peer is not None
4173 assert peer is not None
4168 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4174 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4169 ui.status(
4175 ui.status(
4170 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4176 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4171 )
4177 )
4172
4178
4173 batchedcommands = None
4179 batchedcommands = None
4174
4180
4175 elif action.startswith(b'httprequest '):
4181 elif action.startswith(b'httprequest '):
4176 if not opener:
4182 if not opener:
4177 raise error.Abort(
4183 raise error.Abort(
4178 _(b'cannot use httprequest without an HTTP peer')
4184 _(b'cannot use httprequest without an HTTP peer')
4179 )
4185 )
4180
4186
4181 request = action.split(b' ', 2)
4187 request = action.split(b' ', 2)
4182 if len(request) != 3:
4188 if len(request) != 3:
4183 raise error.Abort(
4189 raise error.Abort(
4184 _(
4190 _(
4185 b'invalid httprequest: expected format is '
4191 b'invalid httprequest: expected format is '
4186 b'"httprequest <method> <path>'
4192 b'"httprequest <method> <path>'
4187 )
4193 )
4188 )
4194 )
4189
4195
4190 method, httppath = request[1:]
4196 method, httppath = request[1:]
4191 headers = {}
4197 headers = {}
4192 body = None
4198 body = None
4193 frames = []
4199 frames = []
4194 for line in lines:
4200 for line in lines:
4195 line = line.lstrip()
4201 line = line.lstrip()
4196 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4202 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4197 if m:
4203 if m:
4198 # Headers need to use native strings.
4204 # Headers need to use native strings.
4199 key = pycompat.strurl(m.group(1))
4205 key = pycompat.strurl(m.group(1))
4200 value = pycompat.strurl(m.group(2))
4206 value = pycompat.strurl(m.group(2))
4201 headers[key] = value
4207 headers[key] = value
4202 continue
4208 continue
4203
4209
4204 if line.startswith(b'BODYFILE '):
4210 if line.startswith(b'BODYFILE '):
4205 with open(line.split(b' ', 1), b'rb') as fh:
4205 with open(line.split(b' ', 1)[1], b'rb') as fh:
4211 with open(line.split(b' ', 1)[1], b'rb') as fh:
4212 body = fh.read()
4207 elif line.startswith(b'frame '):
4213 elif line.startswith(b'frame '):
4208 frame = wireprotoframing.makeframefromhumanstring(
4214 frame = wireprotoframing.makeframefromhumanstring(
4209 line[len(b'frame ') :]
4215 line[len(b'frame ') :]
4210 )
4216 )
4211
4217
4212 frames.append(frame)
4218 frames.append(frame)
4213 else:
4219 else:
4214 raise error.Abort(
4220 raise error.Abort(
4215 _(b'unknown argument to httprequest: %s') % line
4221 _(b'unknown argument to httprequest: %s') % line
4216 )
4222 )
4217
4223
4218 url = path + httppath
4224 url = path + httppath
4219
4225
4220 if frames:
4226 if frames:
4221 body = b''.join(bytes(f) for f in frames)
4227 body = b''.join(bytes(f) for f in frames)
4222
4228
4223 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4229 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4224
4230
4225 # urllib.Request insists on using has_data() as a proxy for
4231 # urllib.Request insists on using has_data() as a proxy for
4226 # determining the request method. Override that to use our
4232 # determining the request method. Override that to use our
4227 # explicitly requested method.
4233 # explicitly requested method.
4228 req.get_method = lambda: pycompat.sysstr(method)
4234 req.get_method = lambda: pycompat.sysstr(method)
4229
4235
4230 try:
4236 try:
4231 res = opener.open(req)
4237 res = opener.open(req)
4232 body = res.read()
4238 body = res.read()
4233 except util.urlerr.urlerror as e:
4239 except util.urlerr.urlerror as e:
4234 # read() method must be called, but only exists in Python 2
4240 # read() method must be called, but only exists in Python 2
4235 getattr(e, 'read', lambda: None)()
4241 getattr(e, 'read', lambda: None)()
4236 continue
4242 continue
4237
4243
4238 ct = res.headers.get('Content-Type')
4244 ct = res.headers.get('Content-Type')
4239 if ct == 'application/mercurial-cbor':
4245 if ct == 'application/mercurial-cbor':
4240 ui.write(
4246 ui.write(
4241 _(b'cbor> %s\n')
4247 _(b'cbor> %s\n')
4242 % stringutil.pprint(
4248 % stringutil.pprint(
4243 cborutil.decodeall(body), bprefix=True, indent=2
4249 cborutil.decodeall(body), bprefix=True, indent=2
4244 )
4250 )
4245 )
4251 )
4246
4252
4247 elif action == b'close':
4253 elif action == b'close':
4248 assert peer is not None
4254 assert peer is not None
4249 peer.close()
4255 peer.close()
4250 elif action == b'readavailable':
4256 elif action == b'readavailable':
4251 if not stdout or not stderr:
4257 if not stdout or not stderr:
4252 raise error.Abort(
4258 raise error.Abort(
4253 _(b'readavailable not available on this peer')
4259 _(b'readavailable not available on this peer')
4254 )
4260 )
4255
4261
4256 stdin.close()
4262 stdin.close()
4257 stdout.read()
4263 stdout.read()
4258 stderr.read()
4264 stderr.read()
4259
4265
4260 elif action == b'readline':
4266 elif action == b'readline':
4261 if not stdout:
4267 if not stdout:
4262 raise error.Abort(_(b'readline not available on this peer'))
4268 raise error.Abort(_(b'readline not available on this peer'))
4263 stdout.readline()
4269 stdout.readline()
4264 elif action == b'ereadline':
4270 elif action == b'ereadline':
4265 if not stderr:
4271 if not stderr:
4266 raise error.Abort(_(b'ereadline not available on this peer'))
4272 raise error.Abort(_(b'ereadline not available on this peer'))
4267 stderr.readline()
4273 stderr.readline()
4268 elif action.startswith(b'read '):
4274 elif action.startswith(b'read '):
4269 count = int(action.split(b' ', 1)[1])
4275 count = int(action.split(b' ', 1)[1])
4270 if not stdout:
4276 if not stdout:
4271 raise error.Abort(_(b'read not available on this peer'))
4277 raise error.Abort(_(b'read not available on this peer'))
4272 stdout.read(count)
4278 stdout.read(count)
4273 elif action.startswith(b'eread '):
4279 elif action.startswith(b'eread '):
4274 count = int(action.split(b' ', 1)[1])
4280 count = int(action.split(b' ', 1)[1])
4275 if not stderr:
4281 if not stderr:
4276 raise error.Abort(_(b'eread not available on this peer'))
4282 raise error.Abort(_(b'eread not available on this peer'))
4277 stderr.read(count)
4283 stderr.read(count)
4278 else:
4284 else:
4279 raise error.Abort(_(b'unknown action: %s') % action)
4285 raise error.Abort(_(b'unknown action: %s') % action)
4280
4286
4281 if batchedcommands is not None:
4287 if batchedcommands is not None:
4282 raise error.Abort(_(b'unclosed "batchbegin" request'))
4288 raise error.Abort(_(b'unclosed "batchbegin" request'))
4283
4289
4284 if peer:
4290 if peer:
4285 peer.close()
4291 peer.close()
4286
4292
4287 if proc:
4293 if proc:
4288 proc.kill()
4294 proc.kill()
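To make the scripting language documented in the docstring above more concrete, here is a hedged usage sketch (not part of this change). It assumes a local repository and the ``--localssh`` option described in the help text; the handshake and response lines printed will vary by install:

    $ hg debugwireproto --localssh << EOF
    > command listkeys
    >     namespace namespaces
    > EOF

A frame-based request against an HTTP peer would instead use an ``httprequest`` block containing ``frame`` lines in the ``<request-id> <stream-id> <stream-flags> <type> <flags> <payload>`` form described above, for example ``frame 1 1 stream-begin command-request new cbor:{b'name': b'heads'}``. The stream-flag, type, and flag names in that example are meant to match the names defined in ``wireprotoframing.py``, but are given here only as an illustration.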
@@ -1,242 +1,249 b''
1 hg debuginstall
1 hg debuginstall
2 $ hg debuginstall
2 $ hg debuginstall
3 checking encoding (ascii)...
3 checking encoding (ascii)...
4 checking Python executable (*) (glob)
4 checking Python executable (*) (glob)
5 checking Python implementation (*) (glob)
5 checking Python version (2.*) (glob) (no-py3 !)
6 checking Python version (2.*) (glob) (no-py3 !)
6 checking Python version (3.*) (glob) (py3 !)
7 checking Python version (3.*) (glob) (py3 !)
7 checking Python lib (.*[Ll]ib.*)... (re)
8 checking Python lib (.*[Ll]ib.*)... (re)
8 checking Python security support (*) (glob)
9 checking Python security support (*) (glob)
9 TLS 1.2 not supported by Python install; network connections lack modern security (?)
10 TLS 1.2 not supported by Python install; network connections lack modern security (?)
10 SNI not supported by Python install; may have connectivity issues with some servers (?)
11 SNI not supported by Python install; may have connectivity issues with some servers (?)
11 checking Mercurial version (*) (glob)
12 checking Mercurial version (*) (glob)
12 checking Mercurial custom build (*) (glob)
13 checking Mercurial custom build (*) (glob)
13 checking module policy (*) (glob)
14 checking module policy (*) (glob)
14 checking installed modules (*mercurial)... (glob)
15 checking installed modules (*mercurial)... (glob)
15 checking registered compression engines (*zlib*) (glob)
16 checking registered compression engines (*zlib*) (glob)
16 checking available compression engines (*zlib*) (glob)
17 checking available compression engines (*zlib*) (glob)
17 checking available compression engines for wire protocol (*zlib*) (glob)
18 checking available compression engines for wire protocol (*zlib*) (glob)
18 checking "re2" regexp engine \((available|missing)\) (re)
19 checking "re2" regexp engine \((available|missing)\) (re)
19 checking templates (*mercurial?templates)... (glob)
20 checking templates (*mercurial?templates)... (glob)
20 checking default template (*mercurial?templates?map-cmdline.default) (glob)
21 checking default template (*mercurial?templates?map-cmdline.default) (glob)
21 checking commit editor... (*) (glob)
22 checking commit editor... (*) (glob)
22 checking username (test)
23 checking username (test)
23 no problems detected
24 no problems detected
24
25
25 hg debuginstall JSON
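The new "checking Python implementation" line in the output above reports which interpreter is running. A minimal sketch of that kind of probe, assuming it is based on the standard library's ``platform`` module (the real command routes the value through its formatter rather than ``print``):

    import platform
    import sys

    # Hedged sketch: report the interpreter executable and implementation,
    # similar in spirit to the "checking Python ..." lines above.
    print("checking Python executable (%s)" % sys.executable)
    print("checking Python implementation (%s)" % platform.python_implementation())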
26 hg debuginstall JSON
26 $ hg debuginstall -Tjson | sed 's|\\\\|\\|g'
27 $ hg debuginstall -Tjson | sed 's|\\\\|\\|g'
27 [
28 [
28 {
29 {
29 "compengines": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
30 "compengines": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
30 "compenginesavail": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
31 "compenginesavail": ["bz2", "bz2truncated", "none", "zlib"*], (glob)
31 "compenginesserver": [*"zlib"*], (glob)
32 "compenginesserver": [*"zlib"*], (glob)
32 "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob)
33 "defaulttemplate": "*mercurial?templates?map-cmdline.default", (glob)
33 "defaulttemplateerror": null,
34 "defaulttemplateerror": null,
34 "defaulttemplatenotfound": "default",
35 "defaulttemplatenotfound": "default",
35 "editor": "*", (glob)
36 "editor": "*", (glob)
36 "editornotfound": false,
37 "editornotfound": false,
37 "encoding": "ascii",
38 "encoding": "ascii",
38 "encodingerror": null,
39 "encodingerror": null,
39 "extensionserror": null, (no-pure !)
40 "extensionserror": null, (no-pure !)
40 "hgmodulepolicy": "*", (glob)
41 "hgmodulepolicy": "*", (glob)
41 "hgmodules": "*mercurial", (glob)
42 "hgmodules": "*mercurial", (glob)
42 "hgver": "*", (glob)
43 "hgver": "*", (glob)
43 "hgverextra": "*", (glob)
44 "hgverextra": "*", (glob)
44 "problems": 0,
45 "problems": 0,
45 "pythonexe": "*", (glob)
46 "pythonexe": "*", (glob)
47 "pythonimplementation": "*", (glob)
46 "pythonlib": "*", (glob)
48 "pythonlib": "*", (glob)
47 "pythonsecurity": [*], (glob)
49 "pythonsecurity": [*], (glob)
48 "pythonver": "*.*.*", (glob)
50 "pythonver": "*.*.*", (glob)
49 "re2": (true|false), (re)
51 "re2": (true|false), (re)
50 "templatedirs": "*mercurial?templates", (glob)
52 "templatedirs": "*mercurial?templates", (glob)
51 "username": "test",
53 "username": "test",
52 "usernameerror": null,
54 "usernameerror": null,
53 "vinotfound": false
55 "vinotfound": false
54 }
56 }
55 ]
57 ]
56
58
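Because ``-Tjson`` emits a machine-readable list containing a single object, the output above can also be consumed programmatically. A small sketch (not part of the test suite, and assuming ``hg`` is on ``PATH``):

    import json
    import subprocess

    # Run `hg debuginstall -Tjson` and inspect fields exercised above,
    # including the new "pythonimplementation" key.
    raw = subprocess.check_output(["hg", "debuginstall", "-Tjson"])
    info = json.loads(raw)[0]
    print(info["pythonimplementation"], info["pythonver"])
    print("problems:", info["problems"])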
57 hg debuginstall with no username
59 hg debuginstall with no username
58 $ HGUSER= hg debuginstall
60 $ HGUSER= hg debuginstall
59 checking encoding (ascii)...
61 checking encoding (ascii)...
60 checking Python executable (*) (glob)
62 checking Python executable (*) (glob)
63 checking Python implementation (*) (glob)
61 checking Python version (2.*) (glob) (no-py3 !)
64 checking Python version (2.*) (glob) (no-py3 !)
62 checking Python version (3.*) (glob) (py3 !)
65 checking Python version (3.*) (glob) (py3 !)
63 checking Python lib (.*[Ll]ib.*)... (re)
66 checking Python lib (.*[Ll]ib.*)... (re)
64 checking Python security support (*) (glob)
67 checking Python security support (*) (glob)
65 TLS 1.2 not supported by Python install; network connections lack modern security (?)
68 TLS 1.2 not supported by Python install; network connections lack modern security (?)
66 SNI not supported by Python install; may have connectivity issues with some servers (?)
69 SNI not supported by Python install; may have connectivity issues with some servers (?)
67 checking Mercurial version (*) (glob)
70 checking Mercurial version (*) (glob)
68 checking Mercurial custom build (*) (glob)
71 checking Mercurial custom build (*) (glob)
69 checking module policy (*) (glob)
72 checking module policy (*) (glob)
70 checking installed modules (*mercurial)... (glob)
73 checking installed modules (*mercurial)... (glob)
71 checking registered compression engines (*zlib*) (glob)
74 checking registered compression engines (*zlib*) (glob)
72 checking available compression engines (*zlib*) (glob)
75 checking available compression engines (*zlib*) (glob)
73 checking available compression engines for wire protocol (*zlib*) (glob)
76 checking available compression engines for wire protocol (*zlib*) (glob)
74 checking "re2" regexp engine \((available|missing)\) (re)
77 checking "re2" regexp engine \((available|missing)\) (re)
75 checking templates (*mercurial?templates)... (glob)
78 checking templates (*mercurial?templates)... (glob)
76 checking default template (*mercurial?templates?map-cmdline.default) (glob)
79 checking default template (*mercurial?templates?map-cmdline.default) (glob)
77 checking commit editor... (*) (glob)
80 checking commit editor... (*) (glob)
78 checking username...
81 checking username...
79 no username supplied
82 no username supplied
80 (specify a username in your configuration file)
83 (specify a username in your configuration file)
81 1 problems detected, please check your install!
84 1 problems detected, please check your install!
82 [1]
85 [1]
83
86
84 hg debuginstall with invalid encoding
87 hg debuginstall with invalid encoding
85 $ HGENCODING=invalidenc hg debuginstall | grep encoding
88 $ HGENCODING=invalidenc hg debuginstall | grep encoding
86 checking encoding (invalidenc)...
89 checking encoding (invalidenc)...
87 unknown encoding: invalidenc
90 unknown encoding: invalidenc
88
91
89 exception message in JSON
92 exception message in JSON
90
93
91 $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
94 $ HGENCODING=invalidenc HGUSER= hg debuginstall -Tjson | grep error
92 "defaulttemplateerror": null,
95 "defaulttemplateerror": null,
93 "encodingerror": "unknown encoding: invalidenc",
96 "encodingerror": "unknown encoding: invalidenc",
94 "extensionserror": null, (no-pure !)
97 "extensionserror": null, (no-pure !)
95 "usernameerror": "no username supplied",
98 "usernameerror": "no username supplied",
96
99
97 path variables are expanded (~ is the same as $TESTTMP)
100 path variables are expanded (~ is the same as $TESTTMP)
98 $ mkdir tools
101 $ mkdir tools
99 $ touch tools/testeditor.exe
102 $ touch tools/testeditor.exe
100 #if execbit
103 #if execbit
101 $ chmod 755 tools/testeditor.exe
104 $ chmod 755 tools/testeditor.exe
102 #endif
105 #endif
103 $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
106 $ HGEDITOR="~/tools/testeditor.exe" hg debuginstall
104 checking encoding (ascii)...
107 checking encoding (ascii)...
105 checking Python executable (*) (glob)
108 checking Python executable (*) (glob)
109 checking Python implementation (*) (glob)
106 checking Python version (2.*) (glob) (no-py3 !)
110 checking Python version (2.*) (glob) (no-py3 !)
107 checking Python version (3.*) (glob) (py3 !)
111 checking Python version (3.*) (glob) (py3 !)
108 checking Python lib (.*[Ll]ib.*)... (re)
112 checking Python lib (.*[Ll]ib.*)... (re)
109 checking Python security support (*) (glob)
113 checking Python security support (*) (glob)
110 TLS 1.2 not supported by Python install; network connections lack modern security (?)
114 TLS 1.2 not supported by Python install; network connections lack modern security (?)
111 SNI not supported by Python install; may have connectivity issues with some servers (?)
115 SNI not supported by Python install; may have connectivity issues with some servers (?)
112 checking Mercurial version (*) (glob)
116 checking Mercurial version (*) (glob)
113 checking Mercurial custom build (*) (glob)
117 checking Mercurial custom build (*) (glob)
114 checking module policy (*) (glob)
118 checking module policy (*) (glob)
115 checking installed modules (*mercurial)... (glob)
119 checking installed modules (*mercurial)... (glob)
116 checking registered compression engines (*zlib*) (glob)
120 checking registered compression engines (*zlib*) (glob)
117 checking available compression engines (*zlib*) (glob)
121 checking available compression engines (*zlib*) (glob)
118 checking available compression engines for wire protocol (*zlib*) (glob)
122 checking available compression engines for wire protocol (*zlib*) (glob)
119 checking "re2" regexp engine \((available|missing)\) (re)
123 checking "re2" regexp engine \((available|missing)\) (re)
120 checking templates (*mercurial?templates)... (glob)
124 checking templates (*mercurial?templates)... (glob)
121 checking default template (*mercurial?templates?map-cmdline.default) (glob)
125 checking default template (*mercurial?templates?map-cmdline.default) (glob)
122 checking commit editor... ($TESTTMP/tools/testeditor.exe)
126 checking commit editor... ($TESTTMP/tools/testeditor.exe)
123 checking username (test)
127 checking username (test)
124 no problems detected
128 no problems detected
125
129
126 print out the binary post-shlexsplit in the error message when commit editor is
130 print out the binary post-shlexsplit in the error message when commit editor is
127 not found (this is intentionally using backslashes to mimic a windows usecase).
131 not found (this is intentionally using backslashes to mimic a windows usecase).
128 $ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
132 $ HGEDITOR="c:\foo\bar\baz.exe -y -z" hg debuginstall
129 checking encoding (ascii)...
133 checking encoding (ascii)...
130 checking Python executable (*) (glob)
134 checking Python executable (*) (glob)
135 checking Python implementation (*) (glob)
131 checking Python version (2.*) (glob) (no-py3 !)
136 checking Python version (2.*) (glob) (no-py3 !)
132 checking Python version (3.*) (glob) (py3 !)
137 checking Python version (3.*) (glob) (py3 !)
133 checking Python lib (.*[Ll]ib.*)... (re)
138 checking Python lib (.*[Ll]ib.*)... (re)
134 checking Python security support (*) (glob)
139 checking Python security support (*) (glob)
135 TLS 1.2 not supported by Python install; network connections lack modern security (?)
140 TLS 1.2 not supported by Python install; network connections lack modern security (?)
136 SNI not supported by Python install; may have connectivity issues with some servers (?)
141 SNI not supported by Python install; may have connectivity issues with some servers (?)
137 checking Mercurial version (*) (glob)
142 checking Mercurial version (*) (glob)
138 checking Mercurial custom build (*) (glob)
143 checking Mercurial custom build (*) (glob)
139 checking module policy (*) (glob)
144 checking module policy (*) (glob)
140 checking installed modules (*mercurial)... (glob)
145 checking installed modules (*mercurial)... (glob)
141 checking registered compression engines (*zlib*) (glob)
146 checking registered compression engines (*zlib*) (glob)
142 checking available compression engines (*zlib*) (glob)
147 checking available compression engines (*zlib*) (glob)
143 checking available compression engines for wire protocol (*zlib*) (glob)
148 checking available compression engines for wire protocol (*zlib*) (glob)
144 checking "re2" regexp engine \((available|missing)\) (re)
149 checking "re2" regexp engine \((available|missing)\) (re)
145 checking templates (*mercurial?templates)... (glob)
150 checking templates (*mercurial?templates)... (glob)
146 checking default template (*mercurial?templates?map-cmdline.default) (glob)
151 checking default template (*mercurial?templates?map-cmdline.default) (glob)
147 checking commit editor... (c:\foo\bar\baz.exe) (windows !)
152 checking commit editor... (c:\foo\bar\baz.exe) (windows !)
148 Can't find editor 'c:\foo\bar\baz.exe' in PATH (windows !)
153 Can't find editor 'c:\foo\bar\baz.exe' in PATH (windows !)
149 checking commit editor... (c:foobarbaz.exe) (no-windows !)
154 checking commit editor... (c:foobarbaz.exe) (no-windows !)
150 Can't find editor 'c:foobarbaz.exe' in PATH (no-windows !)
155 Can't find editor 'c:foobarbaz.exe' in PATH (no-windows !)
151 (specify a commit editor in your configuration file)
156 (specify a commit editor in your configuration file)
152 checking username (test)
157 checking username (test)
153 1 problems detected, please check your install!
158 1 problems detected, please check your install!
154 [1]
159 [1]
155
160
156 debuginstall extension support
161 debuginstall extension support
157 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false | grep atchman
162 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false | grep atchman
158 fsmonitor checking for watchman binary... (false)
163 fsmonitor checking for watchman binary... (false)
159 watchman binary missing or broken: warning: Watchman unavailable: watchman exited with code 1
164 watchman binary missing or broken: warning: Watchman unavailable: watchman exited with code 1
160 Verify the json works too:
165 Verify the json works too:
161 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false -Tjson | grep atchman
166 $ hg debuginstall --config extensions.fsmonitor= --config fsmonitor.watchman_exe=false -Tjson | grep atchman
162 "fsmonitor-watchman": "false",
167 "fsmonitor-watchman": "false",
163 "fsmonitor-watchman-error": "warning: Watchman unavailable: watchman exited with code 1",
168 "fsmonitor-watchman-error": "warning: Watchman unavailable: watchman exited with code 1",
164
169
165 Verify that Mercurial is installable with pip. Note that this MUST be
170 Verify that Mercurial is installable with pip. Note that this MUST be
166 the last test in this file, because we do some nasty things to the
171 the last test in this file, because we do some nasty things to the
167 shell environment in order to make the virtualenv work reliably.
172 shell environment in order to make the virtualenv work reliably.
168
173
169 On Python 3, we use the venv module, which is part of the standard library.
174 On Python 3, we use the venv module, which is part of the standard library.
170 But some Linux distros strip out this module's functionality involving pip,
175 But some Linux distros strip out this module's functionality involving pip,
171 so we have to look for the ensurepip module, which these distros strip out
176 so we have to look for the ensurepip module, which these distros strip out
172 completely.
177 completely.
173 On Python 2, we use the 3rd party virtualenv module, if available.
178 On Python 2, we use the 3rd party virtualenv module, if available.
174
179
175 $ cd $TESTTMP
180 $ cd $TESTTMP
176 $ unset PYTHONPATH
181 $ unset PYTHONPATH
177
182
178 #if py3 ensurepip
183 #if py3 ensurepip
179 $ "$PYTHON" -m venv installenv >> pip.log
184 $ "$PYTHON" -m venv installenv >> pip.log
180
185
181 Note: we use this weird path to run pip and hg to avoid platform differences,
186 Note: we use this weird path to run pip and hg to avoid platform differences,
182 since it's bin on most platforms but Scripts on Windows.
187 since it's bin on most platforms but Scripts on Windows.
183 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
188 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
184 Failed building wheel for mercurial (?)
189 Failed building wheel for mercurial (?)
185 $ ./installenv/*/hg debuginstall || cat pip.log
190 $ ./installenv/*/hg debuginstall || cat pip.log
186 checking encoding (ascii)...
191 checking encoding (ascii)...
187 checking Python executable (*) (glob)
192 checking Python executable (*) (glob)
193 checking Python implementation (*) (glob)
188 checking Python version (3.*) (glob)
194 checking Python version (3.*) (glob)
189 checking Python lib (*)... (glob)
195 checking Python lib (*)... (glob)
190 checking Python security support (*) (glob)
196 checking Python security support (*) (glob)
191 checking Mercurial version (*) (glob)
197 checking Mercurial version (*) (glob)
192 checking Mercurial custom build (*) (glob)
198 checking Mercurial custom build (*) (glob)
193 checking module policy (*) (glob)
199 checking module policy (*) (glob)
194 checking installed modules (*/mercurial)... (glob)
200 checking installed modules (*/mercurial)... (glob)
195 checking registered compression engines (*) (glob)
201 checking registered compression engines (*) (glob)
196 checking available compression engines (*) (glob)
202 checking available compression engines (*) (glob)
197 checking available compression engines for wire protocol (*) (glob)
203 checking available compression engines for wire protocol (*) (glob)
198 checking "re2" regexp engine \((available|missing)\) (re)
204 checking "re2" regexp engine \((available|missing)\) (re)
199 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
205 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
200 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
206 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
201 checking commit editor... (*) (glob)
207 checking commit editor... (*) (glob)
202 checking username (test)
208 checking username (test)
203 no problems detected
209 no problems detected
204 #endif
210 #endif
205
211
206 #if no-py3 virtualenv
212 #if no-py3 virtualenv
207
213
208 Note: --no-site-packages is deprecated, but some places have an
214 Note: --no-site-packages is deprecated, but some places have an
209 ancient virtualenv from their linux distro or similar and it's not yet
215 ancient virtualenv from their linux distro or similar and it's not yet
210 the default for them.
216 the default for them.
211
217
212 $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log
218 $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log
213 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
219 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
214 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
220 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
215
221
216 Note: we use this weird path to run pip and hg to avoid platform differences,
222 Note: we use this weird path to run pip and hg to avoid platform differences,
217 since it's bin on most platforms but Scripts on Windows.
223 since it's bin on most platforms but Scripts on Windows.
218 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
224 $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
219 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
225 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
220 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
226 DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
221 $ ./installenv/*/hg debuginstall || cat pip.log
227 $ ./installenv/*/hg debuginstall || cat pip.log
222 checking encoding (ascii)...
228 checking encoding (ascii)...
223 checking Python executable (*) (glob)
229 checking Python executable (*) (glob)
230 checking Python implementation (*) (glob)
224 checking Python version (2.*) (glob)
231 checking Python version (2.*) (glob)
225 checking Python lib (*)... (glob)
232 checking Python lib (*)... (glob)
226 checking Python security support (*) (glob)
233 checking Python security support (*) (glob)
227 TLS 1.2 not supported by Python install; network connections lack modern security (?)
234 TLS 1.2 not supported by Python install; network connections lack modern security (?)
228 SNI not supported by Python install; may have connectivity issues with some servers (?)
235 SNI not supported by Python install; may have connectivity issues with some servers (?)
229 checking Mercurial version (*) (glob)
236 checking Mercurial version (*) (glob)
230 checking Mercurial custom build (*) (glob)
237 checking Mercurial custom build (*) (glob)
231 checking module policy (*) (glob)
238 checking module policy (*) (glob)
232 checking installed modules (*/mercurial)... (glob)
239 checking installed modules (*/mercurial)... (glob)
233 checking registered compression engines (*) (glob)
240 checking registered compression engines (*) (glob)
234 checking available compression engines (*) (glob)
241 checking available compression engines (*) (glob)
235 checking available compression engines for wire protocol (*) (glob)
242 checking available compression engines for wire protocol (*) (glob)
236 checking "re2" regexp engine \((available|missing)\) (re)
243 checking "re2" regexp engine \((available|missing)\) (re)
237 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
244 checking templates ($TESTTMP/installenv/*/site-packages/mercurial/templates)... (glob)
238 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
245 checking default template ($TESTTMP/installenv/*/site-packages/mercurial/templates/map-cmdline.default) (glob)
239 checking commit editor... (*) (glob)
246 checking commit editor... (*) (glob)
240 checking username (test)
247 checking username (test)
241 no problems detected
248 no problems detected
242 #endif
249 #endif