##// END OF EJS Templates
debugdeltachain: detect a special case where parents are "skipped"...
marmoute -
r50118:13e52322 default
parent child Browse files
Show More
@@ -1,4947 +1,4991 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 dirstateutils,
49 dirstateutils,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 constants as revlog_constants,
106 constants as revlog_constants,
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
# Convenience alias: release a group of locks in one call.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the strip
# extension's table so `hg strip` stays available, and each @command
# decorator below registers its function into it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
118
118
119
119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it through an unaudited
        # vfs rooted at the current working directory.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        tonode = rlog.lookup
    elif nargs == 2:
        # No index argument: fall back to the changelog of the local repo.
        if not repo:
            msg = _(b'there is no Mercurial repository here (.hg not found)')
            raise error.Abort(msg)
        rev1, rev2 = args
        rlog = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(tonode(rev1), tonode(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
139
139
140
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    payload = util.b85decode(
        # This is a base85-armored version of the EICAR test file. See
        # https://en.wikipedia.org/wiki/EICAR_test_file for details.
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    # Drop the (harmless) test file into the cache area so a resident
    # scanner can notice it.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156
156
157
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file

    Reads the stream clone bundle at ``fname`` and applies it to the
    current repository.
    """
    # Use a context manager so the bundle file is closed even when
    # readbundle()/apply() raises; the previous code leaked the handle.
    # This also matches how debugbundle opens its input.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
164
164
165
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo unless --from-existing was given.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: determine number of revs in DAG
    # (used to size the progress bar and the mergeable-file content).
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits, inside a single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently created node (-1: none yet)
        atbranch = b'default'
        nodeids = []  # node hash for each DAG id, in creation order
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # A node event: create one commit with parents `ps`.
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain the shared "mf" file; on merges, three-way
                    # merge the parents' copies against their ancestor.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's slice of the file with its id.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per revision; on merges also carry
                    # over the second parent's "nf*" files.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs into actual parent node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # A local-tag event for an already-created node.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # A branch-switch event for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen``, verbosely when ``all``"""
    pad = b' ' * indent
    if not all:
        # Terse mode: one node hash per changelog delta.
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node = deltadata[0]
            ui.write(b"%s%s\n" % (pad, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
    )

    def showchunks(named):
        # One line per delta in the current section.
        ui.write(b"\n%s%s\n" % (pad, named))
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            fields = (
                pad,
                hex(node),
                hex(p1),
                hex(p2),
                hex(cs),
                hex(deltabase),
                len(delta),
            )
            ui.write(b"%s%s %s %s %s %s %d\n" % fields)

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    # Filelog sections follow until an empty header dict ends the stream.
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # The stored format is newer than this client understands.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    ``data`` is a binary phase-heads encoding as produced for the
    'phase-heads' bundle2 part; one "<node> <phasename>" line is written
    per head, each prefixed by ``indent`` spaces.
    """
    # NOTE: the previous docstring ("display version and markers...") was a
    # copy-paste from _debugobsmarkers and described the wrong command.
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
426
427
427
def _quasirepr(thing):
    """Return a bytes repr of ``thing``; mappings get sorted, stable keys."""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    items = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(items)
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # An empty --part-type list means "show every part".
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n'
        ui.write((header % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Always build the unbundler (it consumes the part payload);
            # only the detailed dump is suppressed in quiet mode.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only report the bundlespec; no need to parse the payload.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        unbundler = exchange.readbundle(ui, f, bundlepath)
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Plain wire-protocol capabilities first...
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        # ...then the decoded bundle2 capability tree, when advertised.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # Make sure the peer connection is torn down even on error.
        peer.close()
503
503
504
504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    if opts['compute']:
        # Recompute the file-change information from the revision itself.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the stored sidedata block, when present.
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # Categories are tested in priority order; "touched" is the
        # catch-all for files with no more specific category.
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554
554
555
555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    error_count = 0
    # verify() yields (format-string, arg, ...) tuples, one per problem.
    for err in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(err[0] % err[1:])
        error_count += 1
    if error_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569
569
570
570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    show = _debugdisplaystyle if opts.get('style') else _debugdisplaycolor
    return show(ui)
583
583
584
584
def _debugdisplaycolor(ui):
    """print every available color name, each rendered in its own color"""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601
601
602
602
def _debugdisplaystyle(ui):
    """print every configured style label and the effects it maps to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad labels so the effect lists line up in one column.
    column = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616
616
617
617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlog files wholesale, so secret changesets
        # cannot be filtered out; warn instead of silently leaking them.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # Report the repository requirements a consumer must support in order
    # to apply this bundle.
    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639
639
640
640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given on the command line: dump that
        # revlog's DAG instead of the changelog's.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a node event ('n') for every revision, followed by a
            # label event ('l', "rN") for the explicitly requested revs.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names so the
            # events generator can emit labels for them.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch; emit an annotation event ('a')
            # whenever a revision switches to a different branch.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as compact dag-description text.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710
710
711
711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the positional FILE
    # argument is really the revision and no separate REV may be given.
    implied_revlog = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if implied_revlog:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended additionally tries the more permissive date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Show whether the parsed timestamp falls inside RANGE.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has an empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has an empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision statistics: parents, sizes, delta type and
        # the full delta chain leading to `rev`.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            # Walk up through ancestors with zero-length (empty) deltas to
            # find the effective base p1 would delta against.  The guards
            # against nullrev / out-of-range / self-reference keep a
            # corrupted index from looping forever or crashing.
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            # Same skipped-parent resolution for the second parent.
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # Classify the delta; order matters — direct-parent deltas win
            # over the skipped-parent special cases, which win over
            # snapshot/prev/other.
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the previous
            # revision (or the revision is itself a full text).
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by order of first appearance of their base.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Slice the chain the way a sparse read would, and measure how
            # much data each slice pulls from disk.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
945
989
946
990
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket dumps the dirstate-v2 metadata file instead of the
        # entries; it has no meaning for dirstate-v1.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is the deprecated spelling of --no-dates; honor it.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is requested.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # The literal placeholders are padded to the width of the
        # formatted timestamp so the columns stay aligned.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of an octal mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1034
1078
1035
1079
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    # Only dirstate-v2 records the ignore-pattern hash (as the trailing
    # SHA-1 of the docket's tree metadata); nothing to show for v1.
    if not repo.dirstate._use_dirstate_v2:
        return
    metadata = repo.dirstate._map.docket.tree_metadata
    sha1_len = 20  # 160 bits for SHA-1
    ui.write(binascii.hexlify(metadata[-sha1_len:]) + b'\n')
1050
1094
1051
1095
1052 @command(
1096 @command(
1053 b'debugdiscovery',
1097 b'debugdiscovery',
1054 [
1098 [
1055 (b'', b'old', None, _(b'use old-style discovery')),
1099 (b'', b'old', None, _(b'use old-style discovery')),
1056 (
1100 (
1057 b'',
1101 b'',
1058 b'nonheads',
1102 b'nonheads',
1059 None,
1103 None,
1060 _(b'use old-style discovery with non-heads included'),
1104 _(b'use old-style discovery with non-heads included'),
1061 ),
1105 ),
1062 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1106 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1063 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1107 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1064 (
1108 (
1065 b'',
1109 b'',
1066 b'local-as-revs',
1110 b'local-as-revs',
1067 b"",
1111 b"",
1068 b'treat local has having these revisions only',
1112 b'treat local has having these revisions only',
1069 ),
1113 ),
1070 (
1114 (
1071 b'',
1115 b'',
1072 b'remote-as-revs',
1116 b'remote-as-revs',
1073 b"",
1117 b"",
1074 b'use local as remote, with only these revisions',
1118 b'use local as remote, with only these revisions',
1075 ),
1119 ),
1076 ]
1120 ]
1077 + cmdutil.remoteopts
1121 + cmdutil.remoteopts
1078 + cmdutil.formatteropts,
1122 + cmdutil.formatteropts,
1079 _(b'[--rev REV] [OTHER]'),
1123 _(b'[--rev REV] [OTHER]'),
1080 )
1124 )
1081 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1125 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1082 """runs the changeset discovery protocol in isolation
1126 """runs the changeset discovery protocol in isolation
1083
1127
1084 The local peer can be "replaced" by a subset of the local repository by
1128 The local peer can be "replaced" by a subset of the local repository by
1085 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1129 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1086 be "replaced" by a subset of the local repository using the
1130 be "replaced" by a subset of the local repository using the
1087 `--local-as-revs` flag. This is useful to efficiently debug pathological
1131 `--local-as-revs` flag. This is useful to efficiently debug pathological
1088 discovery situation.
1132 discovery situation.
1089
1133
1090 The following developer oriented config are relevant for people playing with this command:
1134 The following developer oriented config are relevant for people playing with this command:
1091
1135
1092 * devel.discovery.exchange-heads=True
1136 * devel.discovery.exchange-heads=True
1093
1137
1094 If False, the discovery will not start with
1138 If False, the discovery will not start with
1095 remote head fetching and local head querying.
1139 remote head fetching and local head querying.
1096
1140
1097 * devel.discovery.grow-sample=True
1141 * devel.discovery.grow-sample=True
1098
1142
1099 If False, the sample size used in set discovery will not be increased
1143 If False, the sample size used in set discovery will not be increased
1100 through the process
1144 through the process
1101
1145
1102 * devel.discovery.grow-sample.dynamic=True
1146 * devel.discovery.grow-sample.dynamic=True
1103
1147
1104 When discovery.grow-sample.dynamic is True, the default, the sample size is
1148 When discovery.grow-sample.dynamic is True, the default, the sample size is
1105 adapted to the shape of the undecided set (it is set to the max of:
1149 adapted to the shape of the undecided set (it is set to the max of:
1106 <target-size>, len(roots(undecided)), len(heads(undecided)
1150 <target-size>, len(roots(undecided)), len(heads(undecided)
1107
1151
1108 * devel.discovery.grow-sample.rate=1.05
1152 * devel.discovery.grow-sample.rate=1.05
1109
1153
1110 the rate at which the sample grow
1154 the rate at which the sample grow
1111
1155
1112 * devel.discovery.randomize=True
1156 * devel.discovery.randomize=True
1113
1157
1114 If andom sampling during discovery are deterministic. It is meant for
1158 If andom sampling during discovery are deterministic. It is meant for
1115 integration tests.
1159 integration tests.
1116
1160
1117 * devel.discovery.sample-size=200
1161 * devel.discovery.sample-size=200
1118
1162
1119 Control the initial size of the discovery sample
1163 Control the initial size of the discovery sample
1120
1164
1121 * devel.discovery.sample-size.initial=100
1165 * devel.discovery.sample-size.initial=100
1122
1166
1123 Control the initial size of the discovery for initial change
1167 Control the initial size of the discovery for initial change
1124 """
1168 """
1125 opts = pycompat.byteskwargs(opts)
1169 opts = pycompat.byteskwargs(opts)
1126 unfi = repo.unfiltered()
1170 unfi = repo.unfiltered()
1127
1171
1128 # setup potential extra filtering
1172 # setup potential extra filtering
1129 local_revs = opts[b"local_as_revs"]
1173 local_revs = opts[b"local_as_revs"]
1130 remote_revs = opts[b"remote_as_revs"]
1174 remote_revs = opts[b"remote_as_revs"]
1131
1175
1132 # make sure tests are repeatable
1176 # make sure tests are repeatable
1133 random.seed(int(opts[b'seed']))
1177 random.seed(int(opts[b'seed']))
1134
1178
1135 if not remote_revs:
1179 if not remote_revs:
1136
1180
1137 remoteurl, branches = urlutil.get_unique_pull_path(
1181 remoteurl, branches = urlutil.get_unique_pull_path(
1138 b'debugdiscovery', repo, ui, remoteurl
1182 b'debugdiscovery', repo, ui, remoteurl
1139 )
1183 )
1140 remote = hg.peer(repo, opts, remoteurl)
1184 remote = hg.peer(repo, opts, remoteurl)
1141 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1185 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1142 else:
1186 else:
1143 branches = (None, [])
1187 branches = (None, [])
1144 remote_filtered_revs = logcmdutil.revrange(
1188 remote_filtered_revs = logcmdutil.revrange(
1145 unfi, [b"not (::(%s))" % remote_revs]
1189 unfi, [b"not (::(%s))" % remote_revs]
1146 )
1190 )
1147 remote_filtered_revs = frozenset(remote_filtered_revs)
1191 remote_filtered_revs = frozenset(remote_filtered_revs)
1148
1192
1149 def remote_func(x):
1193 def remote_func(x):
1150 return remote_filtered_revs
1194 return remote_filtered_revs
1151
1195
1152 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1196 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1153
1197
1154 remote = repo.peer()
1198 remote = repo.peer()
1155 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1199 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1156
1200
1157 if local_revs:
1201 if local_revs:
1158 local_filtered_revs = logcmdutil.revrange(
1202 local_filtered_revs = logcmdutil.revrange(
1159 unfi, [b"not (::(%s))" % local_revs]
1203 unfi, [b"not (::(%s))" % local_revs]
1160 )
1204 )
1161 local_filtered_revs = frozenset(local_filtered_revs)
1205 local_filtered_revs = frozenset(local_filtered_revs)
1162
1206
1163 def local_func(x):
1207 def local_func(x):
1164 return local_filtered_revs
1208 return local_filtered_revs
1165
1209
1166 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1210 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1167 repo = repo.filtered(b'debug-discovery-local-filter')
1211 repo = repo.filtered(b'debug-discovery-local-filter')
1168
1212
1169 data = {}
1213 data = {}
1170 if opts.get(b'old'):
1214 if opts.get(b'old'):
1171
1215
1172 def doit(pushedrevs, remoteheads, remote=remote):
1216 def doit(pushedrevs, remoteheads, remote=remote):
1173 if not util.safehasattr(remote, b'branches'):
1217 if not util.safehasattr(remote, b'branches'):
1174 # enable in-client legacy support
1218 # enable in-client legacy support
1175 remote = localrepo.locallegacypeer(remote.local())
1219 remote = localrepo.locallegacypeer(remote.local())
1176 common, _in, hds = treediscovery.findcommonincoming(
1220 common, _in, hds = treediscovery.findcommonincoming(
1177 repo, remote, force=True, audit=data
1221 repo, remote, force=True, audit=data
1178 )
1222 )
1179 common = set(common)
1223 common = set(common)
1180 if not opts.get(b'nonheads'):
1224 if not opts.get(b'nonheads'):
1181 ui.writenoi18n(
1225 ui.writenoi18n(
1182 b"unpruned common: %s\n"
1226 b"unpruned common: %s\n"
1183 % b" ".join(sorted(short(n) for n in common))
1227 % b" ".join(sorted(short(n) for n in common))
1184 )
1228 )
1185
1229
1186 clnode = repo.changelog.node
1230 clnode = repo.changelog.node
1187 common = repo.revs(b'heads(::%ln)', common)
1231 common = repo.revs(b'heads(::%ln)', common)
1188 common = {clnode(r) for r in common}
1232 common = {clnode(r) for r in common}
1189 return common, hds
1233 return common, hds
1190
1234
1191 else:
1235 else:
1192
1236
1193 def doit(pushedrevs, remoteheads, remote=remote):
1237 def doit(pushedrevs, remoteheads, remote=remote):
1194 nodes = None
1238 nodes = None
1195 if pushedrevs:
1239 if pushedrevs:
1196 revs = logcmdutil.revrange(repo, pushedrevs)
1240 revs = logcmdutil.revrange(repo, pushedrevs)
1197 nodes = [repo[r].node() for r in revs]
1241 nodes = [repo[r].node() for r in revs]
1198 common, any, hds = setdiscovery.findcommonheads(
1242 common, any, hds = setdiscovery.findcommonheads(
1199 ui, repo, remote, ancestorsof=nodes, audit=data
1243 ui, repo, remote, ancestorsof=nodes, audit=data
1200 )
1244 )
1201 return common, hds
1245 return common, hds
1202
1246
1203 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1247 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1204 localrevs = opts[b'rev']
1248 localrevs = opts[b'rev']
1205
1249
1206 fm = ui.formatter(b'debugdiscovery', opts)
1250 fm = ui.formatter(b'debugdiscovery', opts)
1207 if fm.strict_format:
1251 if fm.strict_format:
1208
1252
1209 @contextlib.contextmanager
1253 @contextlib.contextmanager
1210 def may_capture_output():
1254 def may_capture_output():
1211 ui.pushbuffer()
1255 ui.pushbuffer()
1212 yield
1256 yield
1213 data[b'output'] = ui.popbuffer()
1257 data[b'output'] = ui.popbuffer()
1214
1258
1215 else:
1259 else:
1216 may_capture_output = util.nullcontextmanager
1260 may_capture_output = util.nullcontextmanager
1217 with may_capture_output():
1261 with may_capture_output():
1218 with util.timedcm('debug-discovery') as t:
1262 with util.timedcm('debug-discovery') as t:
1219 common, hds = doit(localrevs, remoterevs)
1263 common, hds = doit(localrevs, remoterevs)
1220
1264
1221 # compute all statistics
1265 # compute all statistics
1222 heads_common = set(common)
1266 heads_common = set(common)
1223 heads_remote = set(hds)
1267 heads_remote = set(hds)
1224 heads_local = set(repo.heads())
1268 heads_local = set(repo.heads())
1225 # note: they cannot be a local or remote head that is in common and not
1269 # note: they cannot be a local or remote head that is in common and not
1226 # itself a head of common.
1270 # itself a head of common.
1227 heads_common_local = heads_common & heads_local
1271 heads_common_local = heads_common & heads_local
1228 heads_common_remote = heads_common & heads_remote
1272 heads_common_remote = heads_common & heads_remote
1229 heads_common_both = heads_common & heads_remote & heads_local
1273 heads_common_both = heads_common & heads_remote & heads_local
1230
1274
1231 all = repo.revs(b'all()')
1275 all = repo.revs(b'all()')
1232 common = repo.revs(b'::%ln', common)
1276 common = repo.revs(b'::%ln', common)
1233 roots_common = repo.revs(b'roots(::%ld)', common)
1277 roots_common = repo.revs(b'roots(::%ld)', common)
1234 missing = repo.revs(b'not ::%ld', common)
1278 missing = repo.revs(b'not ::%ld', common)
1235 heads_missing = repo.revs(b'heads(%ld)', missing)
1279 heads_missing = repo.revs(b'heads(%ld)', missing)
1236 roots_missing = repo.revs(b'roots(%ld)', missing)
1280 roots_missing = repo.revs(b'roots(%ld)', missing)
1237 assert len(common) + len(missing) == len(all)
1281 assert len(common) + len(missing) == len(all)
1238
1282
1239 initial_undecided = repo.revs(
1283 initial_undecided = repo.revs(
1240 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1284 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1241 )
1285 )
1242 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1286 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1243 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1287 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1244 common_initial_undecided = initial_undecided & common
1288 common_initial_undecided = initial_undecided & common
1245 missing_initial_undecided = initial_undecided & missing
1289 missing_initial_undecided = initial_undecided & missing
1246
1290
1247 data[b'elapsed'] = t.elapsed
1291 data[b'elapsed'] = t.elapsed
1248 data[b'nb-common-heads'] = len(heads_common)
1292 data[b'nb-common-heads'] = len(heads_common)
1249 data[b'nb-common-heads-local'] = len(heads_common_local)
1293 data[b'nb-common-heads-local'] = len(heads_common_local)
1250 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1294 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1251 data[b'nb-common-heads-both'] = len(heads_common_both)
1295 data[b'nb-common-heads-both'] = len(heads_common_both)
1252 data[b'nb-common-roots'] = len(roots_common)
1296 data[b'nb-common-roots'] = len(roots_common)
1253 data[b'nb-head-local'] = len(heads_local)
1297 data[b'nb-head-local'] = len(heads_local)
1254 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1298 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1255 data[b'nb-head-remote'] = len(heads_remote)
1299 data[b'nb-head-remote'] = len(heads_remote)
1256 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1300 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1257 heads_common_remote
1301 heads_common_remote
1258 )
1302 )
1259 data[b'nb-revs'] = len(all)
1303 data[b'nb-revs'] = len(all)
1260 data[b'nb-revs-common'] = len(common)
1304 data[b'nb-revs-common'] = len(common)
1261 data[b'nb-revs-missing'] = len(missing)
1305 data[b'nb-revs-missing'] = len(missing)
1262 data[b'nb-missing-heads'] = len(heads_missing)
1306 data[b'nb-missing-heads'] = len(heads_missing)
1263 data[b'nb-missing-roots'] = len(roots_missing)
1307 data[b'nb-missing-roots'] = len(roots_missing)
1264 data[b'nb-ini_und'] = len(initial_undecided)
1308 data[b'nb-ini_und'] = len(initial_undecided)
1265 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1309 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1266 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1310 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1267 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1311 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1268 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1312 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1269
1313
1270 fm.startitem()
1314 fm.startitem()
1271 fm.data(**pycompat.strkwargs(data))
1315 fm.data(**pycompat.strkwargs(data))
1272 # display discovery summary
1316 # display discovery summary
1273 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1317 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1274 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1318 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1275 fm.plain(b"queries: %(total-queries)9d\n" % data)
1319 fm.plain(b"queries: %(total-queries)9d\n" % data)
1276 fm.plain(b"heads summary:\n")
1320 fm.plain(b"heads summary:\n")
1277 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1321 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1278 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1322 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1279 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1323 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1280 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1324 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1281 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1325 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1282 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1326 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1283 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1327 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1284 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1328 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1285 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1329 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1286 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1330 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1287 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1331 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1288 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1332 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1289 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1333 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1290 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1334 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1291 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1335 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1292 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1336 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1293 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1337 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1294 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1338 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1295 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1339 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1296 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1340 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1297 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1341 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1298 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1342 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1299
1343
1300 if ui.verbose:
1344 if ui.verbose:
1301 fm.plain(
1345 fm.plain(
1302 b"common heads: %s\n"
1346 b"common heads: %s\n"
1303 % b" ".join(sorted(short(n) for n in heads_common))
1347 % b" ".join(sorted(short(n) for n in heads_common))
1304 )
1348 )
1305 fm.end()
1349 fm.end()
1306
1350
1307
1351
# Chunk size (4 KiB) used by `hg debugdownload` when streaming a resource.
_chunksize = 4 << 10
1309
1353
1310
1354
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The content is streamed in ``_chunksize`` pieces to the file named by
    ``output`` when given, otherwise to the ui.
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to bound memory use
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # fix: the source handle was previously leaked; close it on every
        # path, including when opening the output file fails
        fh.close()
1333
1377
1334
1378
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: the module has no file of its own,
            # report the executable instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # in default (non-quiet, non-verbose) mode the name line also carries
        # the compatibility annotation, so suppress the newline here
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # tested, but not with this exact Mercurial version: show the
                # most recent version it was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1396
1440
1397
1441
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the successive transformation stages of a fileset expression; each
    # stage name can be requested with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # with the legacy --verbose spelling, omit the stage header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be applied to
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working directory files (even unknown/ignored ones)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally print the candidate files selected by the fileset
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1493
1537
1494
1538
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report builds a report; it cannot be combined with consuming one
    # (--from-report) nor with --dry-run.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only ever affected revlogv1 repositories
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # delegate the actual detection/repair work to the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1567
1611
1568
1612
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, but at least as wide as
    # the b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; other formatters (json,
        # template, ...) receive the raw values
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so the UI can highlight repos that drift from the
        # configured or default value
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1639
1683
1640
1684
1641 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1685 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1642 def debugfsinfo(ui, path=b"."):
1686 def debugfsinfo(ui, path=b"."):
1643 """show information detected about current filesystem"""
1687 """show information detected about current filesystem"""
1644 ui.writenoi18n(b'path: %s\n' % path)
1688 ui.writenoi18n(b'path: %s\n' % path)
1645 ui.writenoi18n(
1689 ui.writenoi18n(
1646 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1690 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1647 )
1691 )
1648 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1692 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1649 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1693 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1650 ui.writenoi18n(
1694 ui.writenoi18n(
1651 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1695 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1652 )
1696 )
1653 ui.writenoi18n(
1697 ui.writenoi18n(
1654 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1698 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1655 )
1699 )
1656 casesensitive = b'(unknown)'
1700 casesensitive = b'(unknown)'
1657 try:
1701 try:
1658 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1702 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1659 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1703 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1660 except OSError:
1704 except OSError:
1661 pass
1705 pass
1662 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1706 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1663
1707
1664
1708
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # Translate the hex ids from the command line into binary nodes.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name onto an on-disk bundle type.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1754 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1711
1755
1712
1756
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # The file itself is not matched; maybe one of its parent
                # directories is.
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1804 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1761
1805
1762
1806
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full hashes in debug mode, abbreviated ones otherwise.
    shortfn = hex if ui.debugflag else short

    # Column width of a rendered node id: probe the first revision and fall
    # back to the short-hash width (12) for an empty store.
    idlen = 12
    for probe in store:
        idlen = len(shortfn(store.node(probe)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1802
1846
1803
1847
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # One edge per parent; the second parent is omitted when null.
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1822
1866
1823
1867
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index once; the return value is deliberately unused
    # (presumably this forces the index to be loaded — confirm upstream).
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1833
1877
1834
1878
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of sanity checks (encoding, Python interpreter, compiled
    modules, compression engines, templates, commit editor, username) and
    prints one line per check through the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Running count of failed checks; doubles as the exit code.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the stdlib directory; under an oxidized (PyOxidizer) build the
    # os module has no __file__, so report the executable path instead.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # NOTE(review): the '%' is applied inside _() here, so the already
    # formatted string is what gets looked up for translation — looks
    # unintended; confirm upstream.
    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same oxidized-build fallback as for the Python lib above.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Import the accelerated modules the policy claims exist; any
        # failure is reported and counted as a problem.
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is cleared so the "installed incorrectly" hint below fires
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # A missing 'vi' (the built-in default) is only a warning; a missing
    # explicitly-configured editor is counted as a problem below.
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Extensions may ship their own debuginstall handler; whatever integer
    # it returns is added to the problem count.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2133
2177
2134
2178
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    # One '1'/'0' digit per queried id, in input order.
    ui.write(b"%s\n" % (b"".join(b"1" if known else b"0" for known in flags)))
2148
2192
2149
2193
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Pure alias: kept only so ancient completion scripts keep working.
    debugnamecomplete(ui, repo, *args)
2154
2198
2155
2199
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-freeing unlinks the lock file directly, bypassing every safety
    # check Mercurial normally performs -- hence the DANGEROUS labels.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: abort immediately rather than blocking when
                # another process already holds the working-state lock.
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                # wait=False as above, for the store lock.
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold the lock(s) until the user
                    # confirms the prompt.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: hold the lock(s) until interrupted.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release whatever locks were acquired, even on Abort.
        release(*locks)

    # No modifying option was given: fall through to reporting mode.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Try to take the lock named *name*; report who holds it otherwise.

        Returns 1 if the lock is held by someone else, 0 if it is free.
        """
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got the lock ourselves, so nobody else held it; let it go.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; show a friendlier form,
                    # omitting the host when it is the local machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between our attempt and the
                # stat -- treat it as free; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2279
2323
2280
2324
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        """Return the manifest fulltext cache, aborting if unsupported."""
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            # Not every storage backend exposes a fulltext cache.
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # reading stores the revision in the cache too
            return

    # No modifying option given: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2354
2398
2355
2399
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template producing the traditional human-readable output.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local / other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # One entry per file tracked by the merge state.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Content-merge record: local/ancestor/other identities.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record (rename/divergence).
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2463
2507
2464
2508
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # The 'branches' namespace is handled separately below, because only
    # open branches should be offered for completion.
    for kind, namespace in repo.names.items():
        if kind == b'branches':
            continue
        candidates.update(namespace.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # With no arguments, complete against the empty prefix (i.e. everything).
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2487
2531
2488
2532
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        cl = repo.unfiltered().changelog
        # Prefer the index's own serializer when the implementation has one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            nm_bytes = cl.index.nodemap_data_all()
        else:
            nm_bytes = nodemap.persistent_data(cl.index)
        ui.write(nm_bytes)
    elif opts['dump_disk']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2550
2594
2551
2595
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse *s* as a full binary node id, or raise InputError."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove the markers at the given indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a new marker precursor -> successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent information can only come from a changeset we
                    # actually have (possibly hidden, hence unfiltered()).
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # tr.release() is a no-op after tr.close(); it rolls back
                # the transaction if close() was never reached.
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so we must
            # enumerate everything even though we display a subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2701
2745
2702
2746
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    copy_map = ctx.p1copies()
    for dst, src in copy_map.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2715
2759
2716
2760
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function was previously named ``debugp1copies``, which
    # shadowed the real debugp1copies defined just above at module level.
    # The registered command name (b'debugp2copies') is unchanged, so the
    # command-line interface is unaffected; only the module attribute is
    # corrected so each Python name refers to its own implementation.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2729
2773
2730
2774
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Resolve the spec to an absolute normalized path and reject
        # anything that does not live inside this repository.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # From here on, work with a root-relative, '/'-separated spec.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        matched_files = set()
        matched_dirs = set()
        for f, entry in dirstate.items():
            if not (f.startswith(spec) and entry.state in acceptable):
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if fullpaths:
                matched_files.add(f)
                continue
            # Without --full, only complete up to the next path segment.
            cut = f.find(pycompat.ossep, speclen)
            if cut >= 0:
                matched_dirs.add(f[:cut])
            else:
                matched_files.add(f)
        return matched_files, matched_dirs

    # Map the state-filtering flags onto dirstate state characters;
    # an empty string later falls back to "all states" (b'nmar').
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2799
2843
2800
2844
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report every copy recorded between
    # them, restricted to the user-supplied file patterns.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2814
2858
2815
2859
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            # Localized yes/no rendering shared by both report lines.
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        # Always release the peer connection, even on error.
        peer.close()
2839
2883
2840
2884
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    forced = opts[b'tool']
    if forced:
        # --tool wins over everything else; surface it in verbose mode.
        overrides[(b'ui', b'forcemerge')] = forced
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(forced)))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Keep the tool-selection machinery quiet unless --debug is on.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2925
2969
2926
2970
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Update mode: conditionally push the new value over the wire.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Shell convention: 0 on success, non-zero on failure.
            return not result
        else:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
    finally:
        target.close()
2962
3006
2963
3007
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs (parent vectors) of two revisions

    Prints both vectors, their depths, and the relation between them
    (``=`` equal, ``>``/``<`` ancestor ordering, ``|`` divergent,
    ``?`` when no relation could be determined).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously there was no fallback branch, so `rel` stayed
        # unbound and the final ui.write crashed with UnboundLocalError
        # whenever none of the comparisons above held.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2990
3034
2991
3035
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            only_in_manifest = in_manifest - in_dirstate
            only_in_dirstate = in_dirstate - in_manifest
            not_added = {
                f
                for f in only_in_dirstate
                if not dirstate.get_entry(f).added
            }
            changedfiles = only_in_manifest | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3039
3083
3040
3084
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Only the --only-data flag is consulted; byteskwargs normalizes
    # the key to b"only_data".
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
3057
3101
3058
3102
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abspath)
        if renamed:
            srcpath, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (rel, srcpath, hex(srcnode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3079
3123
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3085
3129
3086
3130
3087 @command(
3131 @command(
3088 b'debugrevlog',
3132 b'debugrevlog',
3089 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3133 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3090 _(b'-c|-m|FILE'),
3134 _(b'-c|-m|FILE'),
3091 optionalrepo=True,
3135 optionalrepo=True,
3092 )
3136 )
3093 def debugrevlog(ui, repo, file_=None, **opts):
3137 def debugrevlog(ui, repo, file_=None, **opts):
3094 """show data and statistics about a revlog"""
3138 """show data and statistics about a revlog"""
3095 opts = pycompat.byteskwargs(opts)
3139 opts = pycompat.byteskwargs(opts)
3096 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3140 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3097
3141
3098 if opts.get(b"dump"):
3142 if opts.get(b"dump"):
3099 numrevs = len(r)
3143 numrevs = len(r)
3100 ui.write(
3144 ui.write(
3101 (
3145 (
3102 b"# rev p1rev p2rev start end deltastart base p1 p2"
3146 b"# rev p1rev p2rev start end deltastart base p1 p2"
3103 b" rawsize totalsize compression heads chainlen\n"
3147 b" rawsize totalsize compression heads chainlen\n"
3104 )
3148 )
3105 )
3149 )
3106 ts = 0
3150 ts = 0
3107 heads = set()
3151 heads = set()
3108
3152
3109 for rev in pycompat.xrange(numrevs):
3153 for rev in pycompat.xrange(numrevs):
3110 dbase = r.deltaparent(rev)
3154 dbase = r.deltaparent(rev)
3111 if dbase == -1:
3155 if dbase == -1:
3112 dbase = rev
3156 dbase = rev
3113 cbase = r.chainbase(rev)
3157 cbase = r.chainbase(rev)
3114 clen = r.chainlen(rev)
3158 clen = r.chainlen(rev)
3115 p1, p2 = r.parentrevs(rev)
3159 p1, p2 = r.parentrevs(rev)
3116 rs = r.rawsize(rev)
3160 rs = r.rawsize(rev)
3117 ts = ts + rs
3161 ts = ts + rs
3118 heads -= set(r.parentrevs(rev))
3162 heads -= set(r.parentrevs(rev))
3119 heads.add(rev)
3163 heads.add(rev)
3120 try:
3164 try:
3121 compression = ts / r.end(rev)
3165 compression = ts / r.end(rev)
3122 except ZeroDivisionError:
3166 except ZeroDivisionError:
3123 compression = 0
3167 compression = 0
3124 ui.write(
3168 ui.write(
3125 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3169 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3126 b"%11d %5d %8d\n"
3170 b"%11d %5d %8d\n"
3127 % (
3171 % (
3128 rev,
3172 rev,
3129 p1,
3173 p1,
3130 p2,
3174 p2,
3131 r.start(rev),
3175 r.start(rev),
3132 r.end(rev),
3176 r.end(rev),
3133 r.start(dbase),
3177 r.start(dbase),
3134 r.start(cbase),
3178 r.start(cbase),
3135 r.start(p1),
3179 r.start(p1),
3136 r.start(p2),
3180 r.start(p2),
3137 rs,
3181 rs,
3138 ts,
3182 ts,
3139 compression,
3183 compression,
3140 len(heads),
3184 len(heads),
3141 clen,
3185 clen,
3142 )
3186 )
3143 )
3187 )
3144 return 0
3188 return 0
3145
3189
3146 format = r._format_version
3190 format = r._format_version
3147 v = r._format_flags
3191 v = r._format_flags
3148 flags = []
3192 flags = []
3149 gdelta = False
3193 gdelta = False
3150 if v & revlog.FLAG_INLINE_DATA:
3194 if v & revlog.FLAG_INLINE_DATA:
3151 flags.append(b'inline')
3195 flags.append(b'inline')
3152 if v & revlog.FLAG_GENERALDELTA:
3196 if v & revlog.FLAG_GENERALDELTA:
3153 gdelta = True
3197 gdelta = True
3154 flags.append(b'generaldelta')
3198 flags.append(b'generaldelta')
3155 if not flags:
3199 if not flags:
3156 flags = [b'(none)']
3200 flags = [b'(none)']
3157
3201
3158 ### tracks merge vs single parent
3202 ### tracks merge vs single parent
3159 nummerges = 0
3203 nummerges = 0
3160
3204
3161 ### tracks ways the "delta" are build
3205 ### tracks ways the "delta" are build
3162 # nodelta
3206 # nodelta
3163 numempty = 0
3207 numempty = 0
3164 numemptytext = 0
3208 numemptytext = 0
3165 numemptydelta = 0
3209 numemptydelta = 0
3166 # full file content
3210 # full file content
3167 numfull = 0
3211 numfull = 0
3168 # intermediate snapshot against a prior snapshot
3212 # intermediate snapshot against a prior snapshot
3169 numsemi = 0
3213 numsemi = 0
3170 # snapshot count per depth
3214 # snapshot count per depth
3171 numsnapdepth = collections.defaultdict(lambda: 0)
3215 numsnapdepth = collections.defaultdict(lambda: 0)
3172 # delta against previous revision
3216 # delta against previous revision
3173 numprev = 0
3217 numprev = 0
3174 # delta against first or second parent (not prev)
3218 # delta against first or second parent (not prev)
3175 nump1 = 0
3219 nump1 = 0
3176 nump2 = 0
3220 nump2 = 0
3177 # delta against neither prev nor parents
3221 # delta against neither prev nor parents
3178 numother = 0
3222 numother = 0
3179 # delta against prev that are also first or second parent
3223 # delta against prev that are also first or second parent
3180 # (details of `numprev`)
3224 # (details of `numprev`)
3181 nump1prev = 0
3225 nump1prev = 0
3182 nump2prev = 0
3226 nump2prev = 0
3183
3227
3184 # data about delta chain of each revs
3228 # data about delta chain of each revs
3185 chainlengths = []
3229 chainlengths = []
3186 chainbases = []
3230 chainbases = []
3187 chainspans = []
3231 chainspans = []
3188
3232
3189 # data about each revision
3233 # data about each revision
3190 datasize = [None, 0, 0]
3234 datasize = [None, 0, 0]
3191 fullsize = [None, 0, 0]
3235 fullsize = [None, 0, 0]
3192 semisize = [None, 0, 0]
3236 semisize = [None, 0, 0]
3193 # snapshot count per depth
3237 # snapshot count per depth
3194 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3238 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3195 deltasize = [None, 0, 0]
3239 deltasize = [None, 0, 0]
3196 chunktypecounts = {}
3240 chunktypecounts = {}
3197 chunktypesizes = {}
3241 chunktypesizes = {}
3198
3242
3199 def addsize(size, l):
3243 def addsize(size, l):
3200 if l[0] is None or size < l[0]:
3244 if l[0] is None or size < l[0]:
3201 l[0] = size
3245 l[0] = size
3202 if size > l[1]:
3246 if size > l[1]:
3203 l[1] = size
3247 l[1] = size
3204 l[2] += size
3248 l[2] += size
3205
3249
3206 numrevs = len(r)
3250 numrevs = len(r)
3207 for rev in pycompat.xrange(numrevs):
3251 for rev in pycompat.xrange(numrevs):
3208 p1, p2 = r.parentrevs(rev)
3252 p1, p2 = r.parentrevs(rev)
3209 delta = r.deltaparent(rev)
3253 delta = r.deltaparent(rev)
3210 if format > 0:
3254 if format > 0:
3211 addsize(r.rawsize(rev), datasize)
3255 addsize(r.rawsize(rev), datasize)
3212 if p2 != nullrev:
3256 if p2 != nullrev:
3213 nummerges += 1
3257 nummerges += 1
3214 size = r.length(rev)
3258 size = r.length(rev)
3215 if delta == nullrev:
3259 if delta == nullrev:
3216 chainlengths.append(0)
3260 chainlengths.append(0)
3217 chainbases.append(r.start(rev))
3261 chainbases.append(r.start(rev))
3218 chainspans.append(size)
3262 chainspans.append(size)
3219 if size == 0:
3263 if size == 0:
3220 numempty += 1
3264 numempty += 1
3221 numemptytext += 1
3265 numemptytext += 1
3222 else:
3266 else:
3223 numfull += 1
3267 numfull += 1
3224 numsnapdepth[0] += 1
3268 numsnapdepth[0] += 1
3225 addsize(size, fullsize)
3269 addsize(size, fullsize)
3226 addsize(size, snapsizedepth[0])
3270 addsize(size, snapsizedepth[0])
3227 else:
3271 else:
3228 chainlengths.append(chainlengths[delta] + 1)
3272 chainlengths.append(chainlengths[delta] + 1)
3229 baseaddr = chainbases[delta]
3273 baseaddr = chainbases[delta]
3230 revaddr = r.start(rev)
3274 revaddr = r.start(rev)
3231 chainbases.append(baseaddr)
3275 chainbases.append(baseaddr)
3232 chainspans.append((revaddr - baseaddr) + size)
3276 chainspans.append((revaddr - baseaddr) + size)
3233 if size == 0:
3277 if size == 0:
3234 numempty += 1
3278 numempty += 1
3235 numemptydelta += 1
3279 numemptydelta += 1
3236 elif r.issnapshot(rev):
3280 elif r.issnapshot(rev):
3237 addsize(size, semisize)
3281 addsize(size, semisize)
3238 numsemi += 1
3282 numsemi += 1
3239 depth = r.snapshotdepth(rev)
3283 depth = r.snapshotdepth(rev)
3240 numsnapdepth[depth] += 1
3284 numsnapdepth[depth] += 1
3241 addsize(size, snapsizedepth[depth])
3285 addsize(size, snapsizedepth[depth])
3242 else:
3286 else:
3243 addsize(size, deltasize)
3287 addsize(size, deltasize)
3244 if delta == rev - 1:
3288 if delta == rev - 1:
3245 numprev += 1
3289 numprev += 1
3246 if delta == p1:
3290 if delta == p1:
3247 nump1prev += 1
3291 nump1prev += 1
3248 elif delta == p2:
3292 elif delta == p2:
3249 nump2prev += 1
3293 nump2prev += 1
3250 elif delta == p1:
3294 elif delta == p1:
3251 nump1 += 1
3295 nump1 += 1
3252 elif delta == p2:
3296 elif delta == p2:
3253 nump2 += 1
3297 nump2 += 1
3254 elif delta != nullrev:
3298 elif delta != nullrev:
3255 numother += 1
3299 numother += 1
3256
3300
3257 # Obtain data on the raw chunks in the revlog.
3301 # Obtain data on the raw chunks in the revlog.
3258 if util.safehasattr(r, b'_getsegmentforrevs'):
3302 if util.safehasattr(r, b'_getsegmentforrevs'):
3259 segment = r._getsegmentforrevs(rev, rev)[1]
3303 segment = r._getsegmentforrevs(rev, rev)[1]
3260 else:
3304 else:
3261 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3305 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3262 if segment:
3306 if segment:
3263 chunktype = bytes(segment[0:1])
3307 chunktype = bytes(segment[0:1])
3264 else:
3308 else:
3265 chunktype = b'empty'
3309 chunktype = b'empty'
3266
3310
3267 if chunktype not in chunktypecounts:
3311 if chunktype not in chunktypecounts:
3268 chunktypecounts[chunktype] = 0
3312 chunktypecounts[chunktype] = 0
3269 chunktypesizes[chunktype] = 0
3313 chunktypesizes[chunktype] = 0
3270
3314
3271 chunktypecounts[chunktype] += 1
3315 chunktypecounts[chunktype] += 1
3272 chunktypesizes[chunktype] += size
3316 chunktypesizes[chunktype] += size
3273
3317
3274 # Adjust size min value for empty cases
3318 # Adjust size min value for empty cases
3275 for size in (datasize, fullsize, semisize, deltasize):
3319 for size in (datasize, fullsize, semisize, deltasize):
3276 if size[0] is None:
3320 if size[0] is None:
3277 size[0] = 0
3321 size[0] = 0
3278
3322
3279 numdeltas = numrevs - numfull - numempty - numsemi
3323 numdeltas = numrevs - numfull - numempty - numsemi
3280 numoprev = numprev - nump1prev - nump2prev
3324 numoprev = numprev - nump1prev - nump2prev
3281 totalrawsize = datasize[2]
3325 totalrawsize = datasize[2]
3282 datasize[2] /= numrevs
3326 datasize[2] /= numrevs
3283 fulltotal = fullsize[2]
3327 fulltotal = fullsize[2]
3284 if numfull == 0:
3328 if numfull == 0:
3285 fullsize[2] = 0
3329 fullsize[2] = 0
3286 else:
3330 else:
3287 fullsize[2] /= numfull
3331 fullsize[2] /= numfull
3288 semitotal = semisize[2]
3332 semitotal = semisize[2]
3289 snaptotal = {}
3333 snaptotal = {}
3290 if numsemi > 0:
3334 if numsemi > 0:
3291 semisize[2] /= numsemi
3335 semisize[2] /= numsemi
3292 for depth in snapsizedepth:
3336 for depth in snapsizedepth:
3293 snaptotal[depth] = snapsizedepth[depth][2]
3337 snaptotal[depth] = snapsizedepth[depth][2]
3294 snapsizedepth[depth][2] /= numsnapdepth[depth]
3338 snapsizedepth[depth][2] /= numsnapdepth[depth]
3295
3339
3296 deltatotal = deltasize[2]
3340 deltatotal = deltasize[2]
3297 if numdeltas > 0:
3341 if numdeltas > 0:
3298 deltasize[2] /= numdeltas
3342 deltasize[2] /= numdeltas
3299 totalsize = fulltotal + semitotal + deltatotal
3343 totalsize = fulltotal + semitotal + deltatotal
3300 avgchainlen = sum(chainlengths) / numrevs
3344 avgchainlen = sum(chainlengths) / numrevs
3301 maxchainlen = max(chainlengths)
3345 maxchainlen = max(chainlengths)
3302 maxchainspan = max(chainspans)
3346 maxchainspan = max(chainspans)
3303 compratio = 1
3347 compratio = 1
3304 if totalsize:
3348 if totalsize:
3305 compratio = totalrawsize / totalsize
3349 compratio = totalrawsize / totalsize
3306
3350
3307 basedfmtstr = b'%%%dd\n'
3351 basedfmtstr = b'%%%dd\n'
3308 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3352 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3309
3353
3310 def dfmtstr(max):
3354 def dfmtstr(max):
3311 return basedfmtstr % len(str(max))
3355 return basedfmtstr % len(str(max))
3312
3356
3313 def pcfmtstr(max, padding=0):
3357 def pcfmtstr(max, padding=0):
3314 return basepcfmtstr % (len(str(max)), b' ' * padding)
3358 return basepcfmtstr % (len(str(max)), b' ' * padding)
3315
3359
3316 def pcfmt(value, total):
3360 def pcfmt(value, total):
3317 if total:
3361 if total:
3318 return (value, 100 * float(value) / total)
3362 return (value, 100 * float(value) / total)
3319 else:
3363 else:
3320 return value, 100.0
3364 return value, 100.0
3321
3365
3322 ui.writenoi18n(b'format : %d\n' % format)
3366 ui.writenoi18n(b'format : %d\n' % format)
3323 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3367 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3324
3368
3325 ui.write(b'\n')
3369 ui.write(b'\n')
3326 fmt = pcfmtstr(totalsize)
3370 fmt = pcfmtstr(totalsize)
3327 fmt2 = dfmtstr(totalsize)
3371 fmt2 = dfmtstr(totalsize)
3328 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3372 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3329 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3373 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3330 ui.writenoi18n(
3374 ui.writenoi18n(
3331 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3375 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3332 )
3376 )
3333 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3377 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3334 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3378 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3335 ui.writenoi18n(
3379 ui.writenoi18n(
3336 b' text : '
3380 b' text : '
3337 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3381 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3338 )
3382 )
3339 ui.writenoi18n(
3383 ui.writenoi18n(
3340 b' delta : '
3384 b' delta : '
3341 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3385 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3342 )
3386 )
3343 ui.writenoi18n(
3387 ui.writenoi18n(
3344 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3388 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3345 )
3389 )
3346 for depth in sorted(numsnapdepth):
3390 for depth in sorted(numsnapdepth):
3347 ui.write(
3391 ui.write(
3348 (b' lvl-%-3d : ' % depth)
3392 (b' lvl-%-3d : ' % depth)
3349 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3393 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3350 )
3394 )
3351 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3395 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3352 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3396 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3353 ui.writenoi18n(
3397 ui.writenoi18n(
3354 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3398 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3355 )
3399 )
3356 for depth in sorted(numsnapdepth):
3400 for depth in sorted(numsnapdepth):
3357 ui.write(
3401 ui.write(
3358 (b' lvl-%-3d : ' % depth)
3402 (b' lvl-%-3d : ' % depth)
3359 + fmt % pcfmt(snaptotal[depth], totalsize)
3403 + fmt % pcfmt(snaptotal[depth], totalsize)
3360 )
3404 )
3361 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3405 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3362
3406
3363 def fmtchunktype(chunktype):
3407 def fmtchunktype(chunktype):
3364 if chunktype == b'empty':
3408 if chunktype == b'empty':
3365 return b' %s : ' % chunktype
3409 return b' %s : ' % chunktype
3366 elif chunktype in pycompat.bytestr(string.ascii_letters):
3410 elif chunktype in pycompat.bytestr(string.ascii_letters):
3367 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3411 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3368 else:
3412 else:
3369 return b' 0x%s : ' % hex(chunktype)
3413 return b' 0x%s : ' % hex(chunktype)
3370
3414
3371 ui.write(b'\n')
3415 ui.write(b'\n')
3372 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3416 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3373 for chunktype in sorted(chunktypecounts):
3417 for chunktype in sorted(chunktypecounts):
3374 ui.write(fmtchunktype(chunktype))
3418 ui.write(fmtchunktype(chunktype))
3375 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3419 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3376 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3420 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3377 for chunktype in sorted(chunktypecounts):
3421 for chunktype in sorted(chunktypecounts):
3378 ui.write(fmtchunktype(chunktype))
3422 ui.write(fmtchunktype(chunktype))
3379 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3423 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3380
3424
3381 ui.write(b'\n')
3425 ui.write(b'\n')
3382 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3426 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3383 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3427 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3384 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3428 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3385 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3429 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3386 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3430 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3387
3431
3388 if format > 0:
3432 if format > 0:
3389 ui.write(b'\n')
3433 ui.write(b'\n')
3390 ui.writenoi18n(
3434 ui.writenoi18n(
3391 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3435 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3392 % tuple(datasize)
3436 % tuple(datasize)
3393 )
3437 )
3394 ui.writenoi18n(
3438 ui.writenoi18n(
3395 b'full revision size (min/max/avg) : %d / %d / %d\n'
3439 b'full revision size (min/max/avg) : %d / %d / %d\n'
3396 % tuple(fullsize)
3440 % tuple(fullsize)
3397 )
3441 )
3398 ui.writenoi18n(
3442 ui.writenoi18n(
3399 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3443 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3400 % tuple(semisize)
3444 % tuple(semisize)
3401 )
3445 )
3402 for depth in sorted(snapsizedepth):
3446 for depth in sorted(snapsizedepth):
3403 if depth == 0:
3447 if depth == 0:
3404 continue
3448 continue
3405 ui.writenoi18n(
3449 ui.writenoi18n(
3406 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3450 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3407 % ((depth,) + tuple(snapsizedepth[depth]))
3451 % ((depth,) + tuple(snapsizedepth[depth]))
3408 )
3452 )
3409 ui.writenoi18n(
3453 ui.writenoi18n(
3410 b'delta size (min/max/avg) : %d / %d / %d\n'
3454 b'delta size (min/max/avg) : %d / %d / %d\n'
3411 % tuple(deltasize)
3455 % tuple(deltasize)
3412 )
3456 )
3413
3457
3414 if numdeltas > 0:
3458 if numdeltas > 0:
3415 ui.write(b'\n')
3459 ui.write(b'\n')
3416 fmt = pcfmtstr(numdeltas)
3460 fmt = pcfmtstr(numdeltas)
3417 fmt2 = pcfmtstr(numdeltas, 4)
3461 fmt2 = pcfmtstr(numdeltas, 4)
3418 ui.writenoi18n(
3462 ui.writenoi18n(
3419 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3463 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3420 )
3464 )
3421 if numprev > 0:
3465 if numprev > 0:
3422 ui.writenoi18n(
3466 ui.writenoi18n(
3423 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3467 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3424 )
3468 )
3425 ui.writenoi18n(
3469 ui.writenoi18n(
3426 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3470 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3427 )
3471 )
3428 ui.writenoi18n(
3472 ui.writenoi18n(
3429 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3473 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3430 )
3474 )
3431 if gdelta:
3475 if gdelta:
3432 ui.writenoi18n(
3476 ui.writenoi18n(
3433 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3477 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3434 )
3478 )
3435 ui.writenoi18n(
3479 ui.writenoi18n(
3436 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3480 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3437 )
3481 )
3438 ui.writenoi18n(
3482 ui.writenoi18n(
3439 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3483 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3440 )
3484 )
3441
3485
3442
3486
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # Only the two historical dump layouts are supported.
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hex node ids, otherwise the short form is used.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure the first node id so the header columns line up.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output line per revision in the revlog.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index entry is unreadable.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3556
3600
3557
3601
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order: each stage transforms the
    # tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimizing) stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed vs. printed only when it changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # Validate each requested stage name before honoring it.
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff
        # their results; any difference means the optimizer changed
        # semantics.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Emit a unified-diff-style listing of the differing revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3689
3733
3690
3734
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio mode is implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3739
3783
3740
3784
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both arguments to nodes; a missing REV2 defaults to the null
    # revision, i.e. "no second parent".
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Rewrite only the dirstate parent pointers under the wlock; nothing
    # else (working directory contents, file states) is touched.
    with repo.wlock():
        repo.setparents(node1, node2)
3768
3812
3769
3813
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # A revlog was selected by flag, so the sole positional argument is
        # the revision, not a file.
        if rev is not None:
            # BUG FIX: these errors previously named b'debugdata' (copy-paste
            # from that command), showing the wrong usage string on failure.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap filelog/manifest objects down to the underlying revlog, which
    # carries the sidedata API.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Emit entries in deterministic (sorted-by-key) order.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3796
3840
3797
3841
3798 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3842 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3799 def debugssl(ui, repo, source=None, **opts):
3843 def debugssl(ui, repo, source=None, **opts):
3800 """test a secure connection to a server
3844 """test a secure connection to a server
3801
3845
3802 This builds the certificate chain for the server on Windows, installing the
3846 This builds the certificate chain for the server on Windows, installing the
3803 missing intermediates and trusted root via Windows Update if necessary. It
3847 missing intermediates and trusted root via Windows Update if necessary. It
3804 does nothing on other platforms.
3848 does nothing on other platforms.
3805
3849
3806 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3850 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3807 that server is used. See :hg:`help urls` for more information.
3851 that server is used. See :hg:`help urls` for more information.
3808
3852
3809 If the update succeeds, retry the original operation. Otherwise, the cause
3853 If the update succeeds, retry the original operation. Otherwise, the cause
3810 of the SSL error is likely another issue.
3854 of the SSL error is likely another issue.
3811 """
3855 """
3812 if not pycompat.iswindows:
3856 if not pycompat.iswindows:
3813 raise error.Abort(
3857 raise error.Abort(
3814 _(b'certificate chain building is only possible on Windows')
3858 _(b'certificate chain building is only possible on Windows')
3815 )
3859 )
3816
3860
3817 if not source:
3861 if not source:
3818 if not repo:
3862 if not repo:
3819 raise error.Abort(
3863 raise error.Abort(
3820 _(
3864 _(
3821 b"there is no Mercurial repository here, and no "
3865 b"there is no Mercurial repository here, and no "
3822 b"server specified"
3866 b"server specified"
3823 )
3867 )
3824 )
3868 )
3825 source = b"default"
3869 source = b"default"
3826
3870
3827 source, branches = urlutil.get_unique_pull_path(
3871 source, branches = urlutil.get_unique_pull_path(
3828 b'debugssl', repo, ui, source
3872 b'debugssl', repo, ui, source
3829 )
3873 )
3830 url = urlutil.url(source)
3874 url = urlutil.url(source)
3831
3875
3832 defaultport = {b'https': 443, b'ssh': 22}
3876 defaultport = {b'https': 443, b'ssh': 22}
3833 if url.scheme in defaultport:
3877 if url.scheme in defaultport:
3834 try:
3878 try:
3835 addr = (url.host, int(url.port or defaultport[url.scheme]))
3879 addr = (url.host, int(url.port or defaultport[url.scheme]))
3836 except ValueError:
3880 except ValueError:
3837 raise error.Abort(_(b"malformed port number in URL"))
3881 raise error.Abort(_(b"malformed port number in URL"))
3838 else:
3882 else:
3839 raise error.Abort(_(b"only https and ssh connections are supported"))
3883 raise error.Abort(_(b"only https and ssh connections are supported"))
3840
3884
3841 from . import win32
3885 from . import win32
3842
3886
3843 s = ssl.wrap_socket(
3887 s = ssl.wrap_socket(
3844 socket.socket(),
3888 socket.socket(),
3845 ssl_version=ssl.PROTOCOL_TLS,
3889 ssl_version=ssl.PROTOCOL_TLS,
3846 cert_reqs=ssl.CERT_NONE,
3890 cert_reqs=ssl.CERT_NONE,
3847 ca_certs=None,
3891 ca_certs=None,
3848 )
3892 )
3849
3893
3850 try:
3894 try:
3851 s.connect(addr)
3895 s.connect(addr)
3852 cert = s.getpeercert(True)
3896 cert = s.getpeercert(True)
3853
3897
3854 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3898 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3855
3899
3856 complete = win32.checkcertificatechain(cert, build=False)
3900 complete = win32.checkcertificatechain(cert, build=False)
3857
3901
3858 if not complete:
3902 if not complete:
3859 ui.status(_(b'certificate chain is incomplete, updating... '))
3903 ui.status(_(b'certificate chain is incomplete, updating... '))
3860
3904
3861 if not win32.checkcertificatechain(cert):
3905 if not win32.checkcertificatechain(cert):
3862 ui.status(_(b'failed.\n'))
3906 ui.status(_(b'failed.\n'))
3863 else:
3907 else:
3864 ui.status(_(b'done.\n'))
3908 ui.status(_(b'done.\n'))
3865 else:
3909 else:
3866 ui.status(_(b'full certificate chain is available\n'))
3910 ui.status(_(b'full certificate chain is available\n'))
3867 finally:
3911 finally:
3868 s.close()
3912 s.close()
3869
3913
3870
3914
3871 @command(
3915 @command(
3872 b"debugbackupbundle",
3916 b"debugbackupbundle",
3873 [
3917 [
3874 (
3918 (
3875 b"",
3919 b"",
3876 b"recover",
3920 b"recover",
3877 b"",
3921 b"",
3878 b"brings the specified changeset back into the repository",
3922 b"brings the specified changeset back into the repository",
3879 )
3923 )
3880 ]
3924 ]
3881 + cmdutil.logopts,
3925 + cmdutil.logopts,
3882 _(b"hg debugbackupbundle [--recover HASH]"),
3926 _(b"hg debugbackupbundle [--recover HASH]"),
3883 )
3927 )
3884 def debugbackupbundle(ui, repo, *pats, **opts):
3928 def debugbackupbundle(ui, repo, *pats, **opts):
3885 """lists the changesets available in backup bundles
3929 """lists the changesets available in backup bundles
3886
3930
3887 Without any arguments, this command prints a list of the changesets in each
3931 Without any arguments, this command prints a list of the changesets in each
3888 backup bundle.
3932 backup bundle.
3889
3933
3890 --recover takes a changeset hash and unbundles the first bundle that
3934 --recover takes a changeset hash and unbundles the first bundle that
3891 contains that hash, which puts that changeset back in your repository.
3935 contains that hash, which puts that changeset back in your repository.
3892
3936
3893 --verbose will print the entire commit message and the bundle path for that
3937 --verbose will print the entire commit message and the bundle path for that
3894 backup.
3938 backup.
3895 """
3939 """
3896 backups = list(
3940 backups = list(
3897 filter(
3941 filter(
3898 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3942 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3899 )
3943 )
3900 )
3944 )
3901 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3945 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3902
3946
3903 opts = pycompat.byteskwargs(opts)
3947 opts = pycompat.byteskwargs(opts)
3904 opts[b"bundle"] = b""
3948 opts[b"bundle"] = b""
3905 opts[b"force"] = None
3949 opts[b"force"] = None
3906 limit = logcmdutil.getlimit(opts)
3950 limit = logcmdutil.getlimit(opts)
3907
3951
3908 def display(other, chlist, displayer):
3952 def display(other, chlist, displayer):
3909 if opts.get(b"newest_first"):
3953 if opts.get(b"newest_first"):
3910 chlist.reverse()
3954 chlist.reverse()
3911 count = 0
3955 count = 0
3912 for n in chlist:
3956 for n in chlist:
3913 if limit is not None and count >= limit:
3957 if limit is not None and count >= limit:
3914 break
3958 break
3915 parents = [
3959 parents = [
3916 True for p in other.changelog.parents(n) if p != repo.nullid
3960 True for p in other.changelog.parents(n) if p != repo.nullid
3917 ]
3961 ]
3918 if opts.get(b"no_merges") and len(parents) == 2:
3962 if opts.get(b"no_merges") and len(parents) == 2:
3919 continue
3963 continue
3920 count += 1
3964 count += 1
3921 displayer.show(other[n])
3965 displayer.show(other[n])
3922
3966
3923 recovernode = opts.get(b"recover")
3967 recovernode = opts.get(b"recover")
3924 if recovernode:
3968 if recovernode:
3925 if scmutil.isrevsymbol(repo, recovernode):
3969 if scmutil.isrevsymbol(repo, recovernode):
3926 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3970 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3927 return
3971 return
3928 elif backups:
3972 elif backups:
3929 msg = _(
3973 msg = _(
3930 b"Recover changesets using: hg debugbackupbundle --recover "
3974 b"Recover changesets using: hg debugbackupbundle --recover "
3931 b"<changeset hash>\n\nAvailable backup changesets:"
3975 b"<changeset hash>\n\nAvailable backup changesets:"
3932 )
3976 )
3933 ui.status(msg, label=b"status.removed")
3977 ui.status(msg, label=b"status.removed")
3934 else:
3978 else:
3935 ui.status(_(b"no backup changesets found\n"))
3979 ui.status(_(b"no backup changesets found\n"))
3936 return
3980 return
3937
3981
3938 for backup in backups:
3982 for backup in backups:
3939 # Much of this is copied from the hg incoming logic
3983 # Much of this is copied from the hg incoming logic
3940 source = os.path.relpath(backup, encoding.getcwd())
3984 source = os.path.relpath(backup, encoding.getcwd())
3941 source, branches = urlutil.get_unique_pull_path(
3985 source, branches = urlutil.get_unique_pull_path(
3942 b'debugbackupbundle',
3986 b'debugbackupbundle',
3943 repo,
3987 repo,
3944 ui,
3988 ui,
3945 source,
3989 source,
3946 default_branches=opts.get(b'branch'),
3990 default_branches=opts.get(b'branch'),
3947 )
3991 )
3948 try:
3992 try:
3949 other = hg.peer(repo, opts, source)
3993 other = hg.peer(repo, opts, source)
3950 except error.LookupError as ex:
3994 except error.LookupError as ex:
3951 msg = _(b"\nwarning: unable to open bundle %s") % source
3995 msg = _(b"\nwarning: unable to open bundle %s") % source
3952 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3996 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3953 ui.warn(msg, hint=hint)
3997 ui.warn(msg, hint=hint)
3954 continue
3998 continue
3955 revs, checkout = hg.addbranchrevs(
3999 revs, checkout = hg.addbranchrevs(
3956 repo, other, branches, opts.get(b"rev")
4000 repo, other, branches, opts.get(b"rev")
3957 )
4001 )
3958
4002
3959 if revs:
4003 if revs:
3960 revs = [other.lookup(rev) for rev in revs]
4004 revs = [other.lookup(rev) for rev in revs]
3961
4005
3962 with ui.silent():
4006 with ui.silent():
3963 try:
4007 try:
3964 other, chlist, cleanupfn = bundlerepo.getremotechanges(
4008 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3965 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
4009 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3966 )
4010 )
3967 except error.LookupError:
4011 except error.LookupError:
3968 continue
4012 continue
3969
4013
3970 try:
4014 try:
3971 if not chlist:
4015 if not chlist:
3972 continue
4016 continue
3973 if recovernode:
4017 if recovernode:
3974 with repo.lock(), repo.transaction(b"unbundle") as tr:
4018 with repo.lock(), repo.transaction(b"unbundle") as tr:
3975 if scmutil.isrevsymbol(other, recovernode):
4019 if scmutil.isrevsymbol(other, recovernode):
3976 ui.status(_(b"Unbundling %s\n") % (recovernode))
4020 ui.status(_(b"Unbundling %s\n") % (recovernode))
3977 f = hg.openpath(ui, source)
4021 f = hg.openpath(ui, source)
3978 gen = exchange.readbundle(ui, f, source)
4022 gen = exchange.readbundle(ui, f, source)
3979 if isinstance(gen, bundle2.unbundle20):
4023 if isinstance(gen, bundle2.unbundle20):
3980 bundle2.applybundle(
4024 bundle2.applybundle(
3981 repo,
4025 repo,
3982 gen,
4026 gen,
3983 tr,
4027 tr,
3984 source=b"unbundle",
4028 source=b"unbundle",
3985 url=b"bundle:" + source,
4029 url=b"bundle:" + source,
3986 )
4030 )
3987 else:
4031 else:
3988 gen.apply(repo, b"unbundle", b"bundle:" + source)
4032 gen.apply(repo, b"unbundle", b"bundle:" + source)
3989 break
4033 break
3990 else:
4034 else:
3991 backupdate = encoding.strtolocal(
4035 backupdate = encoding.strtolocal(
3992 time.strftime(
4036 time.strftime(
3993 "%a %H:%M, %Y-%m-%d",
4037 "%a %H:%M, %Y-%m-%d",
3994 time.localtime(os.path.getmtime(source)),
4038 time.localtime(os.path.getmtime(source)),
3995 )
4039 )
3996 )
4040 )
3997 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
4041 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3998 if ui.verbose:
4042 if ui.verbose:
3999 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4043 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
4000 else:
4044 else:
4001 opts[
4045 opts[
4002 b"template"
4046 b"template"
4003 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4047 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
4004 displayer = logcmdutil.changesetdisplayer(
4048 displayer = logcmdutil.changesetdisplayer(
4005 ui, other, opts, False
4049 ui, other, opts, False
4006 )
4050 )
4007 display(other, chlist, displayer)
4051 display(other, chlist, displayer)
4008 displayer.close()
4052 displayer.close()
4009 finally:
4053 finally:
4010 cleanupfn()
4054 cleanupfn()
4011
4055
4012
4056
4013 @command(
4057 @command(
4014 b'debugsub',
4058 b'debugsub',
4015 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4059 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
4016 _(b'[-r REV] [REV]'),
4060 _(b'[-r REV] [REV]'),
4017 )
4061 )
4018 def debugsub(ui, repo, rev=None):
4062 def debugsub(ui, repo, rev=None):
4019 ctx = scmutil.revsingle(repo, rev, None)
4063 ctx = scmutil.revsingle(repo, rev, None)
4020 for k, v in sorted(ctx.substate.items()):
4064 for k, v in sorted(ctx.substate.items()):
4021 ui.writenoi18n(b'path %s\n' % k)
4065 ui.writenoi18n(b'path %s\n' % k)
4022 ui.writenoi18n(b' source %s\n' % v[0])
4066 ui.writenoi18n(b' source %s\n' % v[0])
4023 ui.writenoi18n(b' revision %s\n' % v[1])
4067 ui.writenoi18n(b' revision %s\n' % v[1])
4024
4068
4025
4069
4026 @command(b'debugshell', optionalrepo=True)
4070 @command(b'debugshell', optionalrepo=True)
4027 def debugshell(ui, repo):
4071 def debugshell(ui, repo):
4028 """run an interactive Python interpreter
4072 """run an interactive Python interpreter
4029
4073
4030 The local namespace is provided with a reference to the ui and
4074 The local namespace is provided with a reference to the ui and
4031 the repo instance (if available).
4075 the repo instance (if available).
4032 """
4076 """
4033 import code
4077 import code
4034
4078
4035 imported_objects = {
4079 imported_objects = {
4036 'ui': ui,
4080 'ui': ui,
4037 'repo': repo,
4081 'repo': repo,
4038 }
4082 }
4039
4083
4040 code.interact(local=imported_objects)
4084 code.interact(local=imported_objects)
4041
4085
4042
4086
4043 @command(
4087 @command(
4044 b'debugsuccessorssets',
4088 b'debugsuccessorssets',
4045 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4089 [(b'', b'closest', False, _(b'return closest successors sets only'))],
4046 _(b'[REV]'),
4090 _(b'[REV]'),
4047 )
4091 )
4048 def debugsuccessorssets(ui, repo, *revs, **opts):
4092 def debugsuccessorssets(ui, repo, *revs, **opts):
4049 """show set of successors for revision
4093 """show set of successors for revision
4050
4094
4051 A successors set of changeset A is a consistent group of revisions that
4095 A successors set of changeset A is a consistent group of revisions that
4052 succeed A. It contains non-obsolete changesets only unless closests
4096 succeed A. It contains non-obsolete changesets only unless closests
4053 successors set is set.
4097 successors set is set.
4054
4098
4055 In most cases a changeset A has a single successors set containing a single
4099 In most cases a changeset A has a single successors set containing a single
4056 successor (changeset A replaced by A').
4100 successor (changeset A replaced by A').
4057
4101
4058 A changeset that is made obsolete with no successors are called "pruned".
4102 A changeset that is made obsolete with no successors are called "pruned".
4059 Such changesets have no successors sets at all.
4103 Such changesets have no successors sets at all.
4060
4104
4061 A changeset that has been "split" will have a successors set containing
4105 A changeset that has been "split" will have a successors set containing
4062 more than one successor.
4106 more than one successor.
4063
4107
4064 A changeset that has been rewritten in multiple different ways is called
4108 A changeset that has been rewritten in multiple different ways is called
4065 "divergent". Such changesets have multiple successor sets (each of which
4109 "divergent". Such changesets have multiple successor sets (each of which
4066 may also be split, i.e. have multiple successors).
4110 may also be split, i.e. have multiple successors).
4067
4111
4068 Results are displayed as follows::
4112 Results are displayed as follows::
4069
4113
4070 <rev1>
4114 <rev1>
4071 <successors-1A>
4115 <successors-1A>
4072 <rev2>
4116 <rev2>
4073 <successors-2A>
4117 <successors-2A>
4074 <successors-2B1> <successors-2B2> <successors-2B3>
4118 <successors-2B1> <successors-2B2> <successors-2B3>
4075
4119
4076 Here rev2 has two possible (i.e. divergent) successors sets. The first
4120 Here rev2 has two possible (i.e. divergent) successors sets. The first
4077 holds one element, whereas the second holds three (i.e. the changeset has
4121 holds one element, whereas the second holds three (i.e. the changeset has
4078 been split).
4122 been split).
4079 """
4123 """
4080 # passed to successorssets caching computation from one call to another
4124 # passed to successorssets caching computation from one call to another
4081 cache = {}
4125 cache = {}
4082 ctx2str = bytes
4126 ctx2str = bytes
4083 node2str = short
4127 node2str = short
4084 for rev in logcmdutil.revrange(repo, revs):
4128 for rev in logcmdutil.revrange(repo, revs):
4085 ctx = repo[rev]
4129 ctx = repo[rev]
4086 ui.write(b'%s\n' % ctx2str(ctx))
4130 ui.write(b'%s\n' % ctx2str(ctx))
4087 for succsset in obsutil.successorssets(
4131 for succsset in obsutil.successorssets(
4088 repo, ctx.node(), closest=opts['closest'], cache=cache
4132 repo, ctx.node(), closest=opts['closest'], cache=cache
4089 ):
4133 ):
4090 if succsset:
4134 if succsset:
4091 ui.write(b' ')
4135 ui.write(b' ')
4092 ui.write(node2str(succsset[0]))
4136 ui.write(node2str(succsset[0]))
4093 for node in succsset[1:]:
4137 for node in succsset[1:]:
4094 ui.write(b' ')
4138 ui.write(b' ')
4095 ui.write(node2str(node))
4139 ui.write(node2str(node))
4096 ui.write(b'\n')
4140 ui.write(b'\n')
4097
4141
4098
4142
4099 @command(b'debugtagscache', [])
4143 @command(b'debugtagscache', [])
4100 def debugtagscache(ui, repo):
4144 def debugtagscache(ui, repo):
4101 """display the contents of .hg/cache/hgtagsfnodes1"""
4145 """display the contents of .hg/cache/hgtagsfnodes1"""
4102 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4146 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4103 flog = repo.file(b'.hgtags')
4147 flog = repo.file(b'.hgtags')
4104 for r in repo:
4148 for r in repo:
4105 node = repo[r].node()
4149 node = repo[r].node()
4106 tagsnode = cache.getfnode(node, computemissing=False)
4150 tagsnode = cache.getfnode(node, computemissing=False)
4107 if tagsnode:
4151 if tagsnode:
4108 tagsnodedisplay = hex(tagsnode)
4152 tagsnodedisplay = hex(tagsnode)
4109 if not flog.hasnode(tagsnode):
4153 if not flog.hasnode(tagsnode):
4110 tagsnodedisplay += b' (unknown node)'
4154 tagsnodedisplay += b' (unknown node)'
4111 elif tagsnode is None:
4155 elif tagsnode is None:
4112 tagsnodedisplay = b'missing'
4156 tagsnodedisplay = b'missing'
4113 else:
4157 else:
4114 tagsnodedisplay = b'invalid'
4158 tagsnodedisplay = b'invalid'
4115
4159
4116 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4160 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4117
4161
4118
4162
4119 @command(
4163 @command(
4120 b'debugtemplate',
4164 b'debugtemplate',
4121 [
4165 [
4122 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4166 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4123 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4167 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4124 ],
4168 ],
4125 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4169 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4126 optionalrepo=True,
4170 optionalrepo=True,
4127 )
4171 )
4128 def debugtemplate(ui, repo, tmpl, **opts):
4172 def debugtemplate(ui, repo, tmpl, **opts):
4129 """parse and apply a template
4173 """parse and apply a template
4130
4174
4131 If -r/--rev is given, the template is processed as a log template and
4175 If -r/--rev is given, the template is processed as a log template and
4132 applied to the given changesets. Otherwise, it is processed as a generic
4176 applied to the given changesets. Otherwise, it is processed as a generic
4133 template.
4177 template.
4134
4178
4135 Use --verbose to print the parsed tree.
4179 Use --verbose to print the parsed tree.
4136 """
4180 """
4137 revs = None
4181 revs = None
4138 if opts['rev']:
4182 if opts['rev']:
4139 if repo is None:
4183 if repo is None:
4140 raise error.RepoError(
4184 raise error.RepoError(
4141 _(b'there is no Mercurial repository here (.hg not found)')
4185 _(b'there is no Mercurial repository here (.hg not found)')
4142 )
4186 )
4143 revs = logcmdutil.revrange(repo, opts['rev'])
4187 revs = logcmdutil.revrange(repo, opts['rev'])
4144
4188
4145 props = {}
4189 props = {}
4146 for d in opts['define']:
4190 for d in opts['define']:
4147 try:
4191 try:
4148 k, v = (e.strip() for e in d.split(b'=', 1))
4192 k, v = (e.strip() for e in d.split(b'=', 1))
4149 if not k or k == b'ui':
4193 if not k or k == b'ui':
4150 raise ValueError
4194 raise ValueError
4151 props[k] = v
4195 props[k] = v
4152 except ValueError:
4196 except ValueError:
4153 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4197 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4154
4198
4155 if ui.verbose:
4199 if ui.verbose:
4156 aliases = ui.configitems(b'templatealias')
4200 aliases = ui.configitems(b'templatealias')
4157 tree = templater.parse(tmpl)
4201 tree = templater.parse(tmpl)
4158 ui.note(templater.prettyformat(tree), b'\n')
4202 ui.note(templater.prettyformat(tree), b'\n')
4159 newtree = templater.expandaliases(tree, aliases)
4203 newtree = templater.expandaliases(tree, aliases)
4160 if newtree != tree:
4204 if newtree != tree:
4161 ui.notenoi18n(
4205 ui.notenoi18n(
4162 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4206 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4163 )
4207 )
4164
4208
4165 if revs is None:
4209 if revs is None:
4166 tres = formatter.templateresources(ui, repo)
4210 tres = formatter.templateresources(ui, repo)
4167 t = formatter.maketemplater(ui, tmpl, resources=tres)
4211 t = formatter.maketemplater(ui, tmpl, resources=tres)
4168 if ui.verbose:
4212 if ui.verbose:
4169 kwds, funcs = t.symbolsuseddefault()
4213 kwds, funcs = t.symbolsuseddefault()
4170 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4214 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4171 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4215 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4172 ui.write(t.renderdefault(props))
4216 ui.write(t.renderdefault(props))
4173 else:
4217 else:
4174 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4218 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4175 if ui.verbose:
4219 if ui.verbose:
4176 kwds, funcs = displayer.t.symbolsuseddefault()
4220 kwds, funcs = displayer.t.symbolsuseddefault()
4177 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4221 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4178 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4222 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4179 for r in revs:
4223 for r in revs:
4180 displayer.show(repo[r], **pycompat.strkwargs(props))
4224 displayer.show(repo[r], **pycompat.strkwargs(props))
4181 displayer.close()
4225 displayer.close()
4182
4226
4183
4227
4184 @command(
4228 @command(
4185 b'debuguigetpass',
4229 b'debuguigetpass',
4186 [
4230 [
4187 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4231 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4188 ],
4232 ],
4189 _(b'[-p TEXT]'),
4233 _(b'[-p TEXT]'),
4190 norepo=True,
4234 norepo=True,
4191 )
4235 )
4192 def debuguigetpass(ui, prompt=b''):
4236 def debuguigetpass(ui, prompt=b''):
4193 """show prompt to type password"""
4237 """show prompt to type password"""
4194 r = ui.getpass(prompt)
4238 r = ui.getpass(prompt)
4195 if r is None:
4239 if r is None:
4196 r = b"<default response>"
4240 r = b"<default response>"
4197 ui.writenoi18n(b'response: %s\n' % r)
4241 ui.writenoi18n(b'response: %s\n' % r)
4198
4242
4199
4243
4200 @command(
4244 @command(
4201 b'debuguiprompt',
4245 b'debuguiprompt',
4202 [
4246 [
4203 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4247 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4204 ],
4248 ],
4205 _(b'[-p TEXT]'),
4249 _(b'[-p TEXT]'),
4206 norepo=True,
4250 norepo=True,
4207 )
4251 )
4208 def debuguiprompt(ui, prompt=b''):
4252 def debuguiprompt(ui, prompt=b''):
4209 """show plain prompt"""
4253 """show plain prompt"""
4210 r = ui.prompt(prompt)
4254 r = ui.prompt(prompt)
4211 ui.writenoi18n(b'response: %s\n' % r)
4255 ui.writenoi18n(b'response: %s\n' % r)
4212
4256
4213
4257
4214 @command(b'debugupdatecaches', [])
4258 @command(b'debugupdatecaches', [])
4215 def debugupdatecaches(ui, repo, *pats, **opts):
4259 def debugupdatecaches(ui, repo, *pats, **opts):
4216 """warm all known caches in the repository"""
4260 """warm all known caches in the repository"""
4217 with repo.wlock(), repo.lock():
4261 with repo.wlock(), repo.lock():
4218 repo.updatecaches(caches=repository.CACHES_ALL)
4262 repo.updatecaches(caches=repository.CACHES_ALL)
4219
4263
4220
4264
4221 @command(
4265 @command(
4222 b'debugupgraderepo',
4266 b'debugupgraderepo',
4223 [
4267 [
4224 (
4268 (
4225 b'o',
4269 b'o',
4226 b'optimize',
4270 b'optimize',
4227 [],
4271 [],
4228 _(b'extra optimization to perform'),
4272 _(b'extra optimization to perform'),
4229 _(b'NAME'),
4273 _(b'NAME'),
4230 ),
4274 ),
4231 (b'', b'run', False, _(b'performs an upgrade')),
4275 (b'', b'run', False, _(b'performs an upgrade')),
4232 (b'', b'backup', True, _(b'keep the old repository content around')),
4276 (b'', b'backup', True, _(b'keep the old repository content around')),
4233 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4277 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4234 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4278 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4235 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4279 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4236 ],
4280 ],
4237 )
4281 )
4238 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4282 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4239 """upgrade a repository to use different features
4283 """upgrade a repository to use different features
4240
4284
4241 If no arguments are specified, the repository is evaluated for upgrade
4285 If no arguments are specified, the repository is evaluated for upgrade
4242 and a list of problems and potential optimizations is printed.
4286 and a list of problems and potential optimizations is printed.
4243
4287
4244 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4288 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4245 can be influenced via additional arguments. More details will be provided
4289 can be influenced via additional arguments. More details will be provided
4246 by the command output when run without ``--run``.
4290 by the command output when run without ``--run``.
4247
4291
4248 During the upgrade, the repository will be locked and no writes will be
4292 During the upgrade, the repository will be locked and no writes will be
4249 allowed.
4293 allowed.
4250
4294
4251 At the end of the upgrade, the repository may not be readable while new
4295 At the end of the upgrade, the repository may not be readable while new
4252 repository data is swapped in. This window will be as long as it takes to
4296 repository data is swapped in. This window will be as long as it takes to
4253 rename some directories inside the ``.hg`` directory. On most machines, this
4297 rename some directories inside the ``.hg`` directory. On most machines, this
4254 should complete almost instantaneously and the chances of a consumer being
4298 should complete almost instantaneously and the chances of a consumer being
4255 unable to access the repository should be low.
4299 unable to access the repository should be low.
4256
4300
4257 By default, all revlogs will be upgraded. You can restrict this using flags
4301 By default, all revlogs will be upgraded. You can restrict this using flags
4258 such as `--manifest`:
4302 such as `--manifest`:
4259
4303
4260 * `--manifest`: only optimize the manifest
4304 * `--manifest`: only optimize the manifest
4261 * `--no-manifest`: optimize all revlog but the manifest
4305 * `--no-manifest`: optimize all revlog but the manifest
4262 * `--changelog`: optimize the changelog only
4306 * `--changelog`: optimize the changelog only
4263 * `--no-changelog --no-manifest`: optimize filelogs only
4307 * `--no-changelog --no-manifest`: optimize filelogs only
4264 * `--filelogs`: optimize the filelogs only
4308 * `--filelogs`: optimize the filelogs only
4265 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4309 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4266 """
4310 """
4267 return upgrade.upgraderepo(
4311 return upgrade.upgraderepo(
4268 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4312 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4269 )
4313 )
4270
4314
4271
4315
4272 @command(
4316 @command(
4273 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4317 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4274 )
4318 )
4275 def debugwalk(ui, repo, *pats, **opts):
4319 def debugwalk(ui, repo, *pats, **opts):
4276 """show how files match on given patterns"""
4320 """show how files match on given patterns"""
4277 opts = pycompat.byteskwargs(opts)
4321 opts = pycompat.byteskwargs(opts)
4278 m = scmutil.match(repo[None], pats, opts)
4322 m = scmutil.match(repo[None], pats, opts)
4279 if ui.verbose:
4323 if ui.verbose:
4280 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4324 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4281 items = list(repo[None].walk(m))
4325 items = list(repo[None].walk(m))
4282 if not items:
4326 if not items:
4283 return
4327 return
4284 f = lambda fn: fn
4328 f = lambda fn: fn
4285 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4329 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4286 f = lambda fn: util.normpath(fn)
4330 f = lambda fn: util.normpath(fn)
4287 fmt = b'f %%-%ds %%-%ds %%s' % (
4331 fmt = b'f %%-%ds %%-%ds %%s' % (
4288 max([len(abs) for abs in items]),
4332 max([len(abs) for abs in items]),
4289 max([len(repo.pathto(abs)) for abs in items]),
4333 max([len(repo.pathto(abs)) for abs in items]),
4290 )
4334 )
4291 for abs in items:
4335 for abs in items:
4292 line = fmt % (
4336 line = fmt % (
4293 abs,
4337 abs,
4294 f(repo.pathto(abs)),
4338 f(repo.pathto(abs)),
4295 m.exact(abs) and b'exact' or b'',
4339 m.exact(abs) and b'exact' or b'',
4296 )
4340 )
4297 ui.write(b"%s\n" % line.rstrip())
4341 ui.write(b"%s\n" % line.rstrip())
4298
4342
4299
4343
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    # One entry per instability affecting the resolved changeset.
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)",
            # space-separated, with a trailing space acting as the
            # separator before the reason text.
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4317
4361
4318
4362
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific
        # arguments are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only options that were actually set (truthy values).
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4349
4393
4350
4394
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language from the file object *fh*.

    Yields ``(action, blocklines)`` tuples, where ``action`` is an
    unindented line introducing a block and ``blocklines`` holds its
    indented payload lines.  A line indented deeper than its predecessor
    is treated as a continuation and concatenated onto the previous
    payload line.  Raises ``error.Abort`` for an indented line that is
    not preceded by an action line.
    """
    activeaction = None
    blocklines = []
    lastindent = 0

    for rawline in fh:
        stripped = rawline.rstrip()
        # Blank lines and comment lines carry no meaning.
        if not stripped or stripped.startswith(b'#'):
            continue

        if not stripped.startswith(b' '):
            # Unindented: a new block begins; emit the previous one.
            if activeaction:
                yield activeaction, blocklines
            activeaction = stripped
            blocklines = []
            lastindent = 0
            continue

        # Indented: must belong to an open block.
        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(stripped) - len(stripped.lstrip())

        if indent > lastindent and blocklines:
            # Deeper indentation continues the previous logical line.
            blocklines[-1] += stripped.lstrip()
        else:
            blocklines.append(stripped)
        lastindent = indent

    # Emit the trailing block, if any.
    if activeaction:
        yield activeaction, blocklines
4391
4435
4392
4436
4393 @command(
4437 @command(
4394 b'debugwireproto',
4438 b'debugwireproto',
4395 [
4439 [
4396 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4440 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4397 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4441 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4398 (
4442 (
4399 b'',
4443 b'',
4400 b'noreadstderr',
4444 b'noreadstderr',
4401 False,
4445 False,
4402 _(b'do not read from stderr of the remote'),
4446 _(b'do not read from stderr of the remote'),
4403 ),
4447 ),
4404 (
4448 (
4405 b'',
4449 b'',
4406 b'nologhandshake',
4450 b'nologhandshake',
4407 False,
4451 False,
4408 _(b'do not log I/O related to the peer handshake'),
4452 _(b'do not log I/O related to the peer handshake'),
4409 ),
4453 ),
4410 ]
4454 ]
4411 + cmdutil.remoteopts,
4455 + cmdutil.remoteopts,
4412 _(b'[PATH]'),
4456 _(b'[PATH]'),
4413 optionalrepo=True,
4457 optionalrepo=True,
4414 )
4458 )
4415 def debugwireproto(ui, repo, path=None, **opts):
4459 def debugwireproto(ui, repo, path=None, **opts):
4416 """send wire protocol commands to a server
4460 """send wire protocol commands to a server
4417
4461
4418 This command can be used to issue wire protocol commands to remote
4462 This command can be used to issue wire protocol commands to remote
4419 peers and to debug the raw data being exchanged.
4463 peers and to debug the raw data being exchanged.
4420
4464
4421 ``--localssh`` will start an SSH server against the current repository
4465 ``--localssh`` will start an SSH server against the current repository
4422 and connect to that. By default, the connection will perform a handshake
4466 and connect to that. By default, the connection will perform a handshake
4423 and establish an appropriate peer instance.
4467 and establish an appropriate peer instance.
4424
4468
4425 ``--peer`` can be used to bypass the handshake protocol and construct a
4469 ``--peer`` can be used to bypass the handshake protocol and construct a
4426 peer instance using the specified class type. Valid values are ``raw``,
4470 peer instance using the specified class type. Valid values are ``raw``,
4427 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4471 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4428 don't support higher-level command actions.
4472 don't support higher-level command actions.
4429
4473
4430 ``--noreadstderr`` can be used to disable automatic reading from stderr
4474 ``--noreadstderr`` can be used to disable automatic reading from stderr
4431 of the peer (for SSH connections only). Disabling automatic reading of
4475 of the peer (for SSH connections only). Disabling automatic reading of
4432 stderr is useful for making output more deterministic.
4476 stderr is useful for making output more deterministic.
4433
4477
4434 Commands are issued via a mini language which is specified via stdin.
4478 Commands are issued via a mini language which is specified via stdin.
4435 The language consists of individual actions to perform. An action is
4479 The language consists of individual actions to perform. An action is
4436 defined by a block. A block is defined as a line with no leading
4480 defined by a block. A block is defined as a line with no leading
4437 space followed by 0 or more lines with leading space. Blocks are
4481 space followed by 0 or more lines with leading space. Blocks are
4438 effectively a high-level command with additional metadata.
4482 effectively a high-level command with additional metadata.
4439
4483
4440 Lines beginning with ``#`` are ignored.
4484 Lines beginning with ``#`` are ignored.
4441
4485
4442 The following sections denote available actions.
4486 The following sections denote available actions.
4443
4487
4444 raw
4488 raw
4445 ---
4489 ---
4446
4490
4447 Send raw data to the server.
4491 Send raw data to the server.
4448
4492
4449 The block payload contains the raw data to send as one atomic send
4493 The block payload contains the raw data to send as one atomic send
4450 operation. The data may not actually be delivered in a single system
4494 operation. The data may not actually be delivered in a single system
4451 call: it depends on the abilities of the transport being used.
4495 call: it depends on the abilities of the transport being used.
4452
4496
4453 Each line in the block is de-indented and concatenated. Then, that
4497 Each line in the block is de-indented and concatenated. Then, that
4454 value is evaluated as a Python b'' literal. This allows the use of
4498 value is evaluated as a Python b'' literal. This allows the use of
4455 backslash escaping, etc.
4499 backslash escaping, etc.
4456
4500
4457 raw+
4501 raw+
4458 ----
4502 ----
4459
4503
4460 Behaves like ``raw`` except flushes output afterwards.
4504 Behaves like ``raw`` except flushes output afterwards.
4461
4505
4462 command <X>
4506 command <X>
4463 -----------
4507 -----------
4464
4508
4465 Send a request to run a named command, whose name follows the ``command``
4509 Send a request to run a named command, whose name follows the ``command``
4466 string.
4510 string.
4467
4511
4468 Arguments to the command are defined as lines in this block. The format of
4512 Arguments to the command are defined as lines in this block. The format of
4469 each line is ``<key> <value>``. e.g.::
4513 each line is ``<key> <value>``. e.g.::
4470
4514
4471 command listkeys
4515 command listkeys
4472 namespace bookmarks
4516 namespace bookmarks
4473
4517
4474 If the value begins with ``eval:``, it will be interpreted as a Python
4518 If the value begins with ``eval:``, it will be interpreted as a Python
4475 literal expression. Otherwise values are interpreted as Python b'' literals.
4519 literal expression. Otherwise values are interpreted as Python b'' literals.
4476 This allows sending complex types and encoding special byte sequences via
4520 This allows sending complex types and encoding special byte sequences via
4477 backslash escaping.
4521 backslash escaping.
4478
4522
4479 The following arguments have special meaning:
4523 The following arguments have special meaning:
4480
4524
4481 ``PUSHFILE``
4525 ``PUSHFILE``
4482 When defined, the *push* mechanism of the peer will be used instead
4526 When defined, the *push* mechanism of the peer will be used instead
4483 of the static request-response mechanism and the content of the
4527 of the static request-response mechanism and the content of the
4484 file specified in the value of this argument will be sent as the
4528 file specified in the value of this argument will be sent as the
4485 command payload.
4529 command payload.
4486
4530
4487 This can be used to submit a local bundle file to the remote.
4531 This can be used to submit a local bundle file to the remote.
4488
4532
4489 batchbegin
4533 batchbegin
4490 ----------
4534 ----------
4491
4535
4492 Instruct the peer to begin a batched send.
4536 Instruct the peer to begin a batched send.
4493
4537
4494 All ``command`` blocks are queued for execution until the next
4538 All ``command`` blocks are queued for execution until the next
4495 ``batchsubmit`` block.
4539 ``batchsubmit`` block.
4496
4540
4497 batchsubmit
4541 batchsubmit
4498 -----------
4542 -----------
4499
4543
4500 Submit previously queued ``command`` blocks as a batch request.
4544 Submit previously queued ``command`` blocks as a batch request.
4501
4545
4502 This action MUST be paired with a ``batchbegin`` action.
4546 This action MUST be paired with a ``batchbegin`` action.
4503
4547
4504 httprequest <method> <path>
4548 httprequest <method> <path>
4505 ---------------------------
4549 ---------------------------
4506
4550
4507 (HTTP peer only)
4551 (HTTP peer only)
4508
4552
4509 Send an HTTP request to the peer.
4553 Send an HTTP request to the peer.
4510
4554
4511 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4555 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4512
4556
4513 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4557 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4514 headers to add to the request. e.g. ``Accept: foo``.
4558 headers to add to the request. e.g. ``Accept: foo``.
4515
4559
4516 The following arguments are special:
4560 The following arguments are special:
4517
4561
4518 ``BODYFILE``
4562 ``BODYFILE``
4519 The content of the file defined as the value to this argument will be
4563 The content of the file defined as the value to this argument will be
4520 transferred verbatim as the HTTP request body.
4564 transferred verbatim as the HTTP request body.
4521
4565
4522 ``frame <type> <flags> <payload>``
4566 ``frame <type> <flags> <payload>``
4523 Send a unified protocol frame as part of the request body.
4567 Send a unified protocol frame as part of the request body.
4524
4568
4525 All frames will be collected and sent as the body to the HTTP
4569 All frames will be collected and sent as the body to the HTTP
4526 request.
4570 request.
4527
4571
4528 close
4572 close
4529 -----
4573 -----
4530
4574
4531 Close the connection to the server.
4575 Close the connection to the server.
4532
4576
4533 flush
4577 flush
4534 -----
4578 -----
4535
4579
4536 Flush data written to the server.
4580 Flush data written to the server.
4537
4581
4538 readavailable
4582 readavailable
4539 -------------
4583 -------------
4540
4584
4541 Close the write end of the connection and read all available data from
4585 Close the write end of the connection and read all available data from
4542 the server.
4586 the server.
4543
4587
4544 If the connection to the server encompasses multiple pipes, we poll both
4588 If the connection to the server encompasses multiple pipes, we poll both
4545 pipes and read available data.
4589 pipes and read available data.
4546
4590
4547 readline
4591 readline
4548 --------
4592 --------
4549
4593
4550 Read a line of output from the server. If there are multiple output
4594 Read a line of output from the server. If there are multiple output
4551 pipes, reads only the main pipe.
4595 pipes, reads only the main pipe.
4552
4596
4553 ereadline
4597 ereadline
4554 ---------
4598 ---------
4555
4599
4556 Like ``readline``, but read from the stderr pipe, if available.
4600 Like ``readline``, but read from the stderr pipe, if available.
4557
4601
4558 read <X>
4602 read <X>
4559 --------
4603 --------
4560
4604
4561 ``read()`` N bytes from the server's main output pipe.
4605 ``read()`` N bytes from the server's main output pipe.
4562
4606
4563 eread <X>
4607 eread <X>
4564 ---------
4608 ---------
4565
4609
4566 ``read()`` N bytes from the server's stderr pipe, if available.
4610 ``read()`` N bytes from the server's stderr pipe, if available.
4567
4611
4568 Specifying Unified Frame-Based Protocol Frames
4612 Specifying Unified Frame-Based Protocol Frames
4569 ----------------------------------------------
4613 ----------------------------------------------
4570
4614
4571 It is possible to emit a *Unified Frame-Based Protocol* by using special
4615 It is possible to emit a *Unified Frame-Based Protocol* by using special
4572 syntax.
4616 syntax.
4573
4617
4574 A frame is composed as a type, flags, and payload. These can be parsed
4618 A frame is composed as a type, flags, and payload. These can be parsed
4575 from a string of the form:
4619 from a string of the form:
4576
4620
4577 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4621 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4578
4622
4579 ``request-id`` and ``stream-id`` are integers defining the request and
4623 ``request-id`` and ``stream-id`` are integers defining the request and
4580 stream identifiers.
4624 stream identifiers.
4581
4625
4582 ``type`` can be an integer value for the frame type or the string name
4626 ``type`` can be an integer value for the frame type or the string name
4583 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4627 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4584 ``command-name``.
4628 ``command-name``.
4585
4629
4586 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4630 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4587 components. Each component (and there can be just one) can be an integer
4631 components. Each component (and there can be just one) can be an integer
4588 or a flag name for stream flags or frame flags, respectively. Values are
4632 or a flag name for stream flags or frame flags, respectively. Values are
4589 resolved to integers and then bitwise OR'd together.
4633 resolved to integers and then bitwise OR'd together.
4590
4634
4591 ``payload`` represents the raw frame payload. If it begins with
4635 ``payload`` represents the raw frame payload. If it begins with
4592 ``cbor:``, the following string is evaluated as Python code and the
4636 ``cbor:``, the following string is evaluated as Python code and the
4593 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4637 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4594 as a Python byte string literal.
4638 as a Python byte string literal.
4595 """
4639 """
4596 opts = pycompat.byteskwargs(opts)
4640 opts = pycompat.byteskwargs(opts)
4597
4641
4598 if opts[b'localssh'] and not repo:
4642 if opts[b'localssh'] and not repo:
4599 raise error.Abort(_(b'--localssh requires a repository'))
4643 raise error.Abort(_(b'--localssh requires a repository'))
4600
4644
4601 if opts[b'peer'] and opts[b'peer'] not in (
4645 if opts[b'peer'] and opts[b'peer'] not in (
4602 b'raw',
4646 b'raw',
4603 b'ssh1',
4647 b'ssh1',
4604 ):
4648 ):
4605 raise error.Abort(
4649 raise error.Abort(
4606 _(b'invalid value for --peer'),
4650 _(b'invalid value for --peer'),
4607 hint=_(b'valid values are "raw" and "ssh1"'),
4651 hint=_(b'valid values are "raw" and "ssh1"'),
4608 )
4652 )
4609
4653
4610 if path and opts[b'localssh']:
4654 if path and opts[b'localssh']:
4611 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4655 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4612
4656
4613 if ui.interactive():
4657 if ui.interactive():
4614 ui.write(_(b'(waiting for commands on stdin)\n'))
4658 ui.write(_(b'(waiting for commands on stdin)\n'))
4615
4659
4616 blocks = list(_parsewirelangblocks(ui.fin))
4660 blocks = list(_parsewirelangblocks(ui.fin))
4617
4661
4618 proc = None
4662 proc = None
4619 stdin = None
4663 stdin = None
4620 stdout = None
4664 stdout = None
4621 stderr = None
4665 stderr = None
4622 opener = None
4666 opener = None
4623
4667
4624 if opts[b'localssh']:
4668 if opts[b'localssh']:
4625 # We start the SSH server in its own process so there is process
4669 # We start the SSH server in its own process so there is process
4626 # separation. This prevents a whole class of potential bugs around
4670 # separation. This prevents a whole class of potential bugs around
4627 # shared state from interfering with server operation.
4671 # shared state from interfering with server operation.
4628 args = procutil.hgcmd() + [
4672 args = procutil.hgcmd() + [
4629 b'-R',
4673 b'-R',
4630 repo.root,
4674 repo.root,
4631 b'debugserve',
4675 b'debugserve',
4632 b'--sshstdio',
4676 b'--sshstdio',
4633 ]
4677 ]
4634 proc = subprocess.Popen(
4678 proc = subprocess.Popen(
4635 pycompat.rapply(procutil.tonativestr, args),
4679 pycompat.rapply(procutil.tonativestr, args),
4636 stdin=subprocess.PIPE,
4680 stdin=subprocess.PIPE,
4637 stdout=subprocess.PIPE,
4681 stdout=subprocess.PIPE,
4638 stderr=subprocess.PIPE,
4682 stderr=subprocess.PIPE,
4639 bufsize=0,
4683 bufsize=0,
4640 )
4684 )
4641
4685
4642 stdin = proc.stdin
4686 stdin = proc.stdin
4643 stdout = proc.stdout
4687 stdout = proc.stdout
4644 stderr = proc.stderr
4688 stderr = proc.stderr
4645
4689
4646 # We turn the pipes into observers so we can log I/O.
4690 # We turn the pipes into observers so we can log I/O.
4647 if ui.verbose or opts[b'peer'] == b'raw':
4691 if ui.verbose or opts[b'peer'] == b'raw':
4648 stdin = util.makeloggingfileobject(
4692 stdin = util.makeloggingfileobject(
4649 ui, proc.stdin, b'i', logdata=True
4693 ui, proc.stdin, b'i', logdata=True
4650 )
4694 )
4651 stdout = util.makeloggingfileobject(
4695 stdout = util.makeloggingfileobject(
4652 ui, proc.stdout, b'o', logdata=True
4696 ui, proc.stdout, b'o', logdata=True
4653 )
4697 )
4654 stderr = util.makeloggingfileobject(
4698 stderr = util.makeloggingfileobject(
4655 ui, proc.stderr, b'e', logdata=True
4699 ui, proc.stderr, b'e', logdata=True
4656 )
4700 )
4657
4701
4658 # --localssh also implies the peer connection settings.
4702 # --localssh also implies the peer connection settings.
4659
4703
4660 url = b'ssh://localserver'
4704 url = b'ssh://localserver'
4661 autoreadstderr = not opts[b'noreadstderr']
4705 autoreadstderr = not opts[b'noreadstderr']
4662
4706
4663 if opts[b'peer'] == b'ssh1':
4707 if opts[b'peer'] == b'ssh1':
4664 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4708 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4665 peer = sshpeer.sshv1peer(
4709 peer = sshpeer.sshv1peer(
4666 ui,
4710 ui,
4667 url,
4711 url,
4668 proc,
4712 proc,
4669 stdin,
4713 stdin,
4670 stdout,
4714 stdout,
4671 stderr,
4715 stderr,
4672 None,
4716 None,
4673 autoreadstderr=autoreadstderr,
4717 autoreadstderr=autoreadstderr,
4674 )
4718 )
4675 elif opts[b'peer'] == b'raw':
4719 elif opts[b'peer'] == b'raw':
4676 ui.write(_(b'using raw connection to peer\n'))
4720 ui.write(_(b'using raw connection to peer\n'))
4677 peer = None
4721 peer = None
4678 else:
4722 else:
4679 ui.write(_(b'creating ssh peer from handshake results\n'))
4723 ui.write(_(b'creating ssh peer from handshake results\n'))
4680 peer = sshpeer.makepeer(
4724 peer = sshpeer.makepeer(
4681 ui,
4725 ui,
4682 url,
4726 url,
4683 proc,
4727 proc,
4684 stdin,
4728 stdin,
4685 stdout,
4729 stdout,
4686 stderr,
4730 stderr,
4687 autoreadstderr=autoreadstderr,
4731 autoreadstderr=autoreadstderr,
4688 )
4732 )
4689
4733
4690 elif path:
4734 elif path:
4691 # We bypass hg.peer() so we can proxy the sockets.
4735 # We bypass hg.peer() so we can proxy the sockets.
4692 # TODO consider not doing this because we skip
4736 # TODO consider not doing this because we skip
4693 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4737 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4694 u = urlutil.url(path)
4738 u = urlutil.url(path)
4695 if u.scheme != b'http':
4739 if u.scheme != b'http':
4696 raise error.Abort(_(b'only http:// paths are currently supported'))
4740 raise error.Abort(_(b'only http:// paths are currently supported'))
4697
4741
4698 url, authinfo = u.authinfo()
4742 url, authinfo = u.authinfo()
4699 openerargs = {
4743 openerargs = {
4700 'useragent': b'Mercurial debugwireproto',
4744 'useragent': b'Mercurial debugwireproto',
4701 }
4745 }
4702
4746
4703 # Turn pipes/sockets into observers so we can log I/O.
4747 # Turn pipes/sockets into observers so we can log I/O.
4704 if ui.verbose:
4748 if ui.verbose:
4705 openerargs.update(
4749 openerargs.update(
4706 {
4750 {
4707 'loggingfh': ui,
4751 'loggingfh': ui,
4708 'loggingname': b's',
4752 'loggingname': b's',
4709 'loggingopts': {
4753 'loggingopts': {
4710 'logdata': True,
4754 'logdata': True,
4711 'logdataapis': False,
4755 'logdataapis': False,
4712 },
4756 },
4713 }
4757 }
4714 )
4758 )
4715
4759
4716 if ui.debugflag:
4760 if ui.debugflag:
4717 openerargs['loggingopts']['logdataapis'] = True
4761 openerargs['loggingopts']['logdataapis'] = True
4718
4762
4719 # Don't send default headers when in raw mode. This allows us to
4763 # Don't send default headers when in raw mode. This allows us to
4720 # bypass most of the behavior of our URL handling code so we can
4764 # bypass most of the behavior of our URL handling code so we can
4721 # have near complete control over what's sent on the wire.
4765 # have near complete control over what's sent on the wire.
4722 if opts[b'peer'] == b'raw':
4766 if opts[b'peer'] == b'raw':
4723 openerargs['sendaccept'] = False
4767 openerargs['sendaccept'] = False
4724
4768
4725 opener = urlmod.opener(ui, authinfo, **openerargs)
4769 opener = urlmod.opener(ui, authinfo, **openerargs)
4726
4770
4727 if opts[b'peer'] == b'raw':
4771 if opts[b'peer'] == b'raw':
4728 ui.write(_(b'using raw connection to peer\n'))
4772 ui.write(_(b'using raw connection to peer\n'))
4729 peer = None
4773 peer = None
4730 elif opts[b'peer']:
4774 elif opts[b'peer']:
4731 raise error.Abort(
4775 raise error.Abort(
4732 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4776 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4733 )
4777 )
4734 else:
4778 else:
4735 peer = httppeer.makepeer(ui, path, opener=opener)
4779 peer = httppeer.makepeer(ui, path, opener=opener)
4736
4780
4737 # We /could/ populate stdin/stdout with sock.makefile()...
4781 # We /could/ populate stdin/stdout with sock.makefile()...
4738 else:
4782 else:
4739 raise error.Abort(_(b'unsupported connection configuration'))
4783 raise error.Abort(_(b'unsupported connection configuration'))
4740
4784
4741 batchedcommands = None
4785 batchedcommands = None
4742
4786
4743 # Now perform actions based on the parsed wire language instructions.
4787 # Now perform actions based on the parsed wire language instructions.
4744 for action, lines in blocks:
4788 for action, lines in blocks:
4745 if action in (b'raw', b'raw+'):
4789 if action in (b'raw', b'raw+'):
4746 if not stdin:
4790 if not stdin:
4747 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4791 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4748
4792
4749 # Concatenate the data together.
4793 # Concatenate the data together.
4750 data = b''.join(l.lstrip() for l in lines)
4794 data = b''.join(l.lstrip() for l in lines)
4751 data = stringutil.unescapestr(data)
4795 data = stringutil.unescapestr(data)
4752 stdin.write(data)
4796 stdin.write(data)
4753
4797
4754 if action == b'raw+':
4798 if action == b'raw+':
4755 stdin.flush()
4799 stdin.flush()
4756 elif action == b'flush':
4800 elif action == b'flush':
4757 if not stdin:
4801 if not stdin:
4758 raise error.Abort(_(b'cannot call flush on this peer'))
4802 raise error.Abort(_(b'cannot call flush on this peer'))
4759 stdin.flush()
4803 stdin.flush()
4760 elif action.startswith(b'command'):
4804 elif action.startswith(b'command'):
4761 if not peer:
4805 if not peer:
4762 raise error.Abort(
4806 raise error.Abort(
4763 _(
4807 _(
4764 b'cannot send commands unless peer instance '
4808 b'cannot send commands unless peer instance '
4765 b'is available'
4809 b'is available'
4766 )
4810 )
4767 )
4811 )
4768
4812
4769 command = action.split(b' ', 1)[1]
4813 command = action.split(b' ', 1)[1]
4770
4814
4771 args = {}
4815 args = {}
4772 for line in lines:
4816 for line in lines:
4773 # We need to allow empty values.
4817 # We need to allow empty values.
4774 fields = line.lstrip().split(b' ', 1)
4818 fields = line.lstrip().split(b' ', 1)
4775 if len(fields) == 1:
4819 if len(fields) == 1:
4776 key = fields[0]
4820 key = fields[0]
4777 value = b''
4821 value = b''
4778 else:
4822 else:
4779 key, value = fields
4823 key, value = fields
4780
4824
4781 if value.startswith(b'eval:'):
4825 if value.startswith(b'eval:'):
4782 value = stringutil.evalpythonliteral(value[5:])
4826 value = stringutil.evalpythonliteral(value[5:])
4783 else:
4827 else:
4784 value = stringutil.unescapestr(value)
4828 value = stringutil.unescapestr(value)
4785
4829
4786 args[key] = value
4830 args[key] = value
4787
4831
4788 if batchedcommands is not None:
4832 if batchedcommands is not None:
4789 batchedcommands.append((command, args))
4833 batchedcommands.append((command, args))
4790 continue
4834 continue
4791
4835
4792 ui.status(_(b'sending %s command\n') % command)
4836 ui.status(_(b'sending %s command\n') % command)
4793
4837
4794 if b'PUSHFILE' in args:
4838 if b'PUSHFILE' in args:
4795 with open(args[b'PUSHFILE'], 'rb') as fh:
4839 with open(args[b'PUSHFILE'], 'rb') as fh:
4796 del args[b'PUSHFILE']
4840 del args[b'PUSHFILE']
4797 res, output = peer._callpush(
4841 res, output = peer._callpush(
4798 command, fh, **pycompat.strkwargs(args)
4842 command, fh, **pycompat.strkwargs(args)
4799 )
4843 )
4800 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4844 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4801 ui.status(
4845 ui.status(
4802 _(b'remote output: %s\n') % stringutil.escapestr(output)
4846 _(b'remote output: %s\n') % stringutil.escapestr(output)
4803 )
4847 )
4804 else:
4848 else:
4805 with peer.commandexecutor() as e:
4849 with peer.commandexecutor() as e:
4806 res = e.callcommand(command, args).result()
4850 res = e.callcommand(command, args).result()
4807
4851
4808 ui.status(
4852 ui.status(
4809 _(b'response: %s\n')
4853 _(b'response: %s\n')
4810 % stringutil.pprint(res, bprefix=True, indent=2)
4854 % stringutil.pprint(res, bprefix=True, indent=2)
4811 )
4855 )
4812
4856
4813 elif action == b'batchbegin':
4857 elif action == b'batchbegin':
4814 if batchedcommands is not None:
4858 if batchedcommands is not None:
4815 raise error.Abort(_(b'nested batchbegin not allowed'))
4859 raise error.Abort(_(b'nested batchbegin not allowed'))
4816
4860
4817 batchedcommands = []
4861 batchedcommands = []
4818 elif action == b'batchsubmit':
4862 elif action == b'batchsubmit':
4819 # There is a batching API we could go through. But it would be
4863 # There is a batching API we could go through. But it would be
4820 # difficult to normalize requests into function calls. It is easier
4864 # difficult to normalize requests into function calls. It is easier
4821 # to bypass this layer and normalize to commands + args.
4865 # to bypass this layer and normalize to commands + args.
4822 ui.status(
4866 ui.status(
4823 _(b'sending batch with %d sub-commands\n')
4867 _(b'sending batch with %d sub-commands\n')
4824 % len(batchedcommands)
4868 % len(batchedcommands)
4825 )
4869 )
4826 assert peer is not None
4870 assert peer is not None
4827 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4871 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4828 ui.status(
4872 ui.status(
4829 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4873 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4830 )
4874 )
4831
4875
4832 batchedcommands = None
4876 batchedcommands = None
4833
4877
4834 elif action.startswith(b'httprequest '):
4878 elif action.startswith(b'httprequest '):
4835 if not opener:
4879 if not opener:
4836 raise error.Abort(
4880 raise error.Abort(
4837 _(b'cannot use httprequest without an HTTP peer')
4881 _(b'cannot use httprequest without an HTTP peer')
4838 )
4882 )
4839
4883
4840 request = action.split(b' ', 2)
4884 request = action.split(b' ', 2)
4841 if len(request) != 3:
4885 if len(request) != 3:
4842 raise error.Abort(
4886 raise error.Abort(
4843 _(
4887 _(
4844 b'invalid httprequest: expected format is '
4888 b'invalid httprequest: expected format is '
4845 b'"httprequest <method> <path>'
4889 b'"httprequest <method> <path>'
4846 )
4890 )
4847 )
4891 )
4848
4892
4849 method, httppath = request[1:]
4893 method, httppath = request[1:]
4850 headers = {}
4894 headers = {}
4851 body = None
4895 body = None
4852 frames = []
4896 frames = []
4853 for line in lines:
4897 for line in lines:
4854 line = line.lstrip()
4898 line = line.lstrip()
4855 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4899 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4856 if m:
4900 if m:
4857 # Headers need to use native strings.
4901 # Headers need to use native strings.
4858 key = pycompat.strurl(m.group(1))
4902 key = pycompat.strurl(m.group(1))
4859 value = pycompat.strurl(m.group(2))
4903 value = pycompat.strurl(m.group(2))
4860 headers[key] = value
4904 headers[key] = value
4861 continue
4905 continue
4862
4906
4863 if line.startswith(b'BODYFILE '):
4907 if line.startswith(b'BODYFILE '):
4864 with open(line.split(b' ', 1), b'rb') as fh:
4908 with open(line.split(b' ', 1), b'rb') as fh:
4865 body = fh.read()
4909 body = fh.read()
4866 elif line.startswith(b'frame '):
4910 elif line.startswith(b'frame '):
4867 frame = wireprotoframing.makeframefromhumanstring(
4911 frame = wireprotoframing.makeframefromhumanstring(
4868 line[len(b'frame ') :]
4912 line[len(b'frame ') :]
4869 )
4913 )
4870
4914
4871 frames.append(frame)
4915 frames.append(frame)
4872 else:
4916 else:
4873 raise error.Abort(
4917 raise error.Abort(
4874 _(b'unknown argument to httprequest: %s') % line
4918 _(b'unknown argument to httprequest: %s') % line
4875 )
4919 )
4876
4920
4877 url = path + httppath
4921 url = path + httppath
4878
4922
4879 if frames:
4923 if frames:
4880 body = b''.join(bytes(f) for f in frames)
4924 body = b''.join(bytes(f) for f in frames)
4881
4925
4882 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4926 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4883
4927
4884 # urllib.Request insists on using has_data() as a proxy for
4928 # urllib.Request insists on using has_data() as a proxy for
4885 # determining the request method. Override that to use our
4929 # determining the request method. Override that to use our
4886 # explicitly requested method.
4930 # explicitly requested method.
4887 req.get_method = lambda: pycompat.sysstr(method)
4931 req.get_method = lambda: pycompat.sysstr(method)
4888
4932
4889 try:
4933 try:
4890 res = opener.open(req)
4934 res = opener.open(req)
4891 body = res.read()
4935 body = res.read()
4892 except util.urlerr.urlerror as e:
4936 except util.urlerr.urlerror as e:
4893 # read() method must be called, but only exists in Python 2
4937 # read() method must be called, but only exists in Python 2
4894 getattr(e, 'read', lambda: None)()
4938 getattr(e, 'read', lambda: None)()
4895 continue
4939 continue
4896
4940
4897 ct = res.headers.get('Content-Type')
4941 ct = res.headers.get('Content-Type')
4898 if ct == 'application/mercurial-cbor':
4942 if ct == 'application/mercurial-cbor':
4899 ui.write(
4943 ui.write(
4900 _(b'cbor> %s\n')
4944 _(b'cbor> %s\n')
4901 % stringutil.pprint(
4945 % stringutil.pprint(
4902 cborutil.decodeall(body), bprefix=True, indent=2
4946 cborutil.decodeall(body), bprefix=True, indent=2
4903 )
4947 )
4904 )
4948 )
4905
4949
4906 elif action == b'close':
4950 elif action == b'close':
4907 assert peer is not None
4951 assert peer is not None
4908 peer.close()
4952 peer.close()
4909 elif action == b'readavailable':
4953 elif action == b'readavailable':
4910 if not stdout or not stderr:
4954 if not stdout or not stderr:
4911 raise error.Abort(
4955 raise error.Abort(
4912 _(b'readavailable not available on this peer')
4956 _(b'readavailable not available on this peer')
4913 )
4957 )
4914
4958
4915 stdin.close()
4959 stdin.close()
4916 stdout.read()
4960 stdout.read()
4917 stderr.read()
4961 stderr.read()
4918
4962
4919 elif action == b'readline':
4963 elif action == b'readline':
4920 if not stdout:
4964 if not stdout:
4921 raise error.Abort(_(b'readline not available on this peer'))
4965 raise error.Abort(_(b'readline not available on this peer'))
4922 stdout.readline()
4966 stdout.readline()
4923 elif action == b'ereadline':
4967 elif action == b'ereadline':
4924 if not stderr:
4968 if not stderr:
4925 raise error.Abort(_(b'ereadline not available on this peer'))
4969 raise error.Abort(_(b'ereadline not available on this peer'))
4926 stderr.readline()
4970 stderr.readline()
4927 elif action.startswith(b'read '):
4971 elif action.startswith(b'read '):
4928 count = int(action.split(b' ', 1)[1])
4972 count = int(action.split(b' ', 1)[1])
4929 if not stdout:
4973 if not stdout:
4930 raise error.Abort(_(b'read not available on this peer'))
4974 raise error.Abort(_(b'read not available on this peer'))
4931 stdout.read(count)
4975 stdout.read(count)
4932 elif action.startswith(b'eread '):
4976 elif action.startswith(b'eread '):
4933 count = int(action.split(b' ', 1)[1])
4977 count = int(action.split(b' ', 1)[1])
4934 if not stderr:
4978 if not stderr:
4935 raise error.Abort(_(b'eread not available on this peer'))
4979 raise error.Abort(_(b'eread not available on this peer'))
4936 stderr.read(count)
4980 stderr.read(count)
4937 else:
4981 else:
4938 raise error.Abort(_(b'unknown action: %s') % action)
4982 raise error.Abort(_(b'unknown action: %s') % action)
4939
4983
4940 if batchedcommands is not None:
4984 if batchedcommands is not None:
4941 raise error.Abort(_(b'unclosed "batchbegin" request'))
4985 raise error.Abort(_(b'unclosed "batchbegin" request'))
4942
4986
4943 if peer:
4987 if peer:
4944 peer.close()
4988 peer.close()
4945
4989
4946 if proc:
4990 if proc:
4947 proc.kill()
4991 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now