##// END OF EJS Templates
debug: convert a few exceptions to bytes before wrapping in another error...
Matt Harbison -
r47516:8408c319 stable
parent child Browse files
Show More
@@ -1,4661 +1,4663
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import glob
14 import glob
15 import operator
15 import operator
16 import os
16 import os
17 import platform
17 import platform
18 import random
18 import random
19 import re
19 import re
20 import socket
20 import socket
21 import ssl
21 import ssl
22 import stat
22 import stat
23 import string
23 import string
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 revlog,
72 revlog,
73 revset,
73 revset,
74 revsetlang,
74 revsetlang,
75 scmutil,
75 scmutil,
76 setdiscovery,
76 setdiscovery,
77 simplemerge,
77 simplemerge,
78 sshpeer,
78 sshpeer,
79 sslutil,
79 sslutil,
80 streamclone,
80 streamclone,
81 strip,
81 strip,
82 tags as tagsmod,
82 tags as tagsmod,
83 templater,
83 templater,
84 treediscovery,
84 treediscovery,
85 upgrade,
85 upgrade,
86 url as urlmod,
86 url as urlmod,
87 util,
87 util,
88 vfs as vfsmod,
88 vfs as vfsmod,
89 wireprotoframing,
89 wireprotoframing,
90 wireprotoserver,
90 wireprotoserver,
91 wireprotov2peer,
91 wireprotov2peer,
92 )
92 )
93 from .utils import (
93 from .utils import (
94 cborutil,
94 cborutil,
95 compression,
95 compression,
96 dateutil,
96 dateutil,
97 procutil,
97 procutil,
98 stringutil,
98 stringutil,
99 )
99 )
100
100
101 from .revlogutils import (
101 from .revlogutils import (
102 deltas as deltautil,
102 deltas as deltautil,
103 nodemap,
103 nodemap,
104 sidedata,
104 sidedata,
105 )
105 )
106
106
107 release = lockmod.release
107 release = lockmod.release
108
108
109 table = {}
109 table = {}
110 table.update(strip.command._table)
110 table.update(strip.command._table)
111 command = registrar.command(table)
111 command = registrar.command(table)
112
112
113
113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, without
        # requiring (or consulting) a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancnode), hex(ancnode)))
133
133
134
134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # NOTE: vfs paths are bytes throughout this codebase (see the other
    # repo.cachevfs/repo.vfs call sites in this file); the native-str
    # literals used here previously would make os.path.join() fail on
    # Python 3 when joined against the vfs's bytes base path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
150
150
151
151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle at the given path and replay it onto the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
158
158
159
159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a brand-new repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass, used only to size the progress bar and the
    # mergeable-file line pool below)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: id of the last node committed; atbranch: branch for new nodes;
        # nodeids: maps the parser's sequential ids to commit node ids.
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create the commits
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file from
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # first node: start from the precomputed line pool
                        ml = initialmergedlines
                    # tag this revision's own lines so merges have content
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the second parent's per-rev files so
                        # they survive the merge
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above; None means "file absent in this rev"
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate parser ids into commit node ids (or None for
                # the null parent)
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for an already-created node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # write accumulated tags once, outside the transaction
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
335
335
336
336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup 'gen', one line per delta

    With 'all' unset only changelog node ids are printed; otherwise every
    delta of every section (changelog, manifest, filelogs) is shown.
    """
    pad = b' ' * indent
    if not all:
        # terse mode only works on plain changegroups
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for node, p1, p2, cs, deltabase, delta, flags in gen.deltaiter():
            ui.write(b"%s%s\n" % (pad, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
    )

    def showchunks(named):
        # print a section title followed by one line per delta
        ui.write(b"\n%s%s\n" % (pad, named))
        for node, p1, p2, cs, deltabase, delta, flags in gen.deltaiter():
            fields = (
                pad,
                hex(node),
                hex(p1),
                hex(p2),
                hex(cs),
                hex(deltabase),
                len(delta),
            )
            ui.write(b"%s%s %s %s %s %s %d\n" % fields)

    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    # filelog sections repeat until an empty header dict is returned
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
376
376
377
377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # still report something useful for markers we cannot decode
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, m)
        fm.end()
400
400
401
401
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
410
410
411
411
def _quasirepr(thing):
    """a repr()-like bytes rendering with deterministic dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        # sort keys so the output is stable regardless of insertion order
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418
418
419
419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when requested
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
442
442
443
443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec, never the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466
466
467
467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b'  %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        # keys at one indent level, their values nested one deeper
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b'  %s\n' % key)
            for value in values:
                ui.write(b'    %s\n' % value)
484
484
485
485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is None:
        # revision carries no files sidedata: nothing to report
        return
    files = metadata.decode_files_sidedata(sd)
    for f in sorted(files.touched):
        # classify the change; order matters, first match wins
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # annotate copies with the parent they were copied from
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(
            b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
        )
518
518
519
519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    dirstate = repo.dirstate
    parent1, parent2 = dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # pass 1: every tracked file's state must be consistent with the
    # parent manifests
    for f in dirstate:
        state = dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # pass 2: every file in the first manifest must be tracked
    for f in m1:
        state = dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
548
548
549
549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # dispatch to the styles listing or the colors listing
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562
562
563
563
def _debugdisplaycolor(ui):
    """print every known color/effect label, rendered in itself"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose the user-configured color/terminfo keys
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
580
580
581
581
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad so the effect lists line up in a column
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            # render each effect name in its own effect
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
595
595
596
596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # Stream bundles copy raw revlog data, so the phase-based secret
    # filtering that normal bundles get does not apply; warn the user.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # generatebundlev1 returns the requirement set the consumer must
    # support plus a chunk generator with the actual payload.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
618
618
619
619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A standalone revlog index file was given: open it directly,
        # relative to the current directory, without path auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit one 'n'(ode) event per revision with its real parents,
            # plus an 'l'(abel) event for revisions the user listed.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision number to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a'(nnotation) event whenever the branch
                    # name changes along the changelog walk.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser renders the event stream as dagtext lines.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
689
689
690
690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision and no second argument may follow.
    storageflag = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if storageflag:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        rev = file_
        file_ = None
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706
706
707
707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended widens the accepted input formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Report whether the parsed timestamp falls inside RANGE.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
726
726
727
727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    # Bind the hot revlog accessors to locals for the per-rev loop below.
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how `rev` was delta-compressed and total up its chain.
        # Index entry fields used here: e[1] compressed size, e[2]
        # uncompressed size, e[3] delta base rev, e[5]/e[6] parent revs
        # (per the p1/p2 comparisons below).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                # A revision that is its own delta base stores a full text.
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is always the previous rev.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Each distinct chain base gets a small sequential id for display.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # lineardist: on-disk span from the chain base to the end of this
        # revision; extradist: the part of that span not in the chain.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse reader: slice the chain into the hunks
            # it would actually read and total their on-disk sizes.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
908
908
909
909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling; its mere presence wins over
    # the default-True --dates flag.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry appears to be (state-char, mode, size, mtime)
    # judging from the formatting below — confirm against dirstate docs.
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored st_mode.
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953
953
954
954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # `data` doubles as the discovery audit dict; the implementations
    # record statistics (e.g. total-roundtrips) into it as they run.
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            # Legacy tree-walking discovery.
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # Reduce the common set to its heads, as node ids.
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            # Modern set-based discovery; --rev limits the ancestors
            # considered on the local side.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # Sanity check: common and missing partition the repository.
    assert len(common) + len(missing) == len(all)

    # The "initial undecided" set: revisions discovery had to reason
    # about because they are neither known-common ancestors nor
    # descendants of common local heads.
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips:           %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b"    also local heads:  %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b"    both:              %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b"      common:            %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"      missing:           %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b"      common:            %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b"      unknown:           %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1113
1113
1114
1114
1115 _chunksize = 4 << 10
1115 _chunksize = 4 << 10
1116
1116
1117
1117
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through ``urlmod.open`` so Mercurial's
    proxy/auth configuration applies.  The content is streamed either to
    the ``ui`` object (default) or, with -o/--output, to the named file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in fixed-size chunks so arbitrarily large downloads do
        # not have to fit in memory.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close the source handle too (it was previously leaked); the
        # nested try keeps the output file closed even if close() raises.
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1140
1140
1141
1141
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: modules have no __file__
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default output appends a compatibility note to the name
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1203
1203
1204
1204
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # transformation pipeline; each stage name can be selected with -p
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1300
1300
1301
1301
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but at least the header label
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output shows booleans as yes/no; byte strings pass through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo/config/default stand out
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1372
1372
1373
1373
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    # Probe case sensitivity by creating a temporary file in the target
    # directory; a permissions/IO failure leaves it reported as unknown.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1396
1396
1397
1397
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1444
1444
1445
1445
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not ignored directly: check each parent directory
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1494
1494
1495
1495
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the node-id column width from the first entry (12 when empty)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1535
1535
1536
1536
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        # only emit the second parent edge for real merges
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1555
1555
1556
1556
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # warm up the index caches before asking for stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1566
1566
1567
1567
1568 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1568 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1569 def debuginstall(ui, **opts):
1569 def debuginstall(ui, **opts):
1570 """test Mercurial installation
1570 """test Mercurial installation
1571
1571
1572 Returns 0 on success.
1572 Returns 0 on success.
1573 """
1573 """
1574 opts = pycompat.byteskwargs(opts)
1574 opts = pycompat.byteskwargs(opts)
1575
1575
1576 problems = 0
1576 problems = 0
1577
1577
1578 fm = ui.formatter(b'debuginstall', opts)
1578 fm = ui.formatter(b'debuginstall', opts)
1579 fm.startitem()
1579 fm.startitem()
1580
1580
1581 # encoding might be unknown or wrong. don't translate these messages.
1581 # encoding might be unknown or wrong. don't translate these messages.
1582 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1582 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1583 err = None
1583 err = None
1584 try:
1584 try:
1585 codecs.lookup(pycompat.sysstr(encoding.encoding))
1585 codecs.lookup(pycompat.sysstr(encoding.encoding))
1586 except LookupError as inst:
1586 except LookupError as inst:
1587 err = stringutil.forcebytestr(inst)
1587 err = stringutil.forcebytestr(inst)
1588 problems += 1
1588 problems += 1
1589 fm.condwrite(
1589 fm.condwrite(
1590 err,
1590 err,
1591 b'encodingerror',
1591 b'encodingerror',
1592 b" %s\n (check that your locale is properly set)\n",
1592 b" %s\n (check that your locale is properly set)\n",
1593 err,
1593 err,
1594 )
1594 )
1595
1595
1596 # Python
1596 # Python
1597 pythonlib = None
1597 pythonlib = None
1598 if util.safehasattr(os, '__file__'):
1598 if util.safehasattr(os, '__file__'):
1599 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1599 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1600 elif getattr(sys, 'oxidized', False):
1600 elif getattr(sys, 'oxidized', False):
1601 pythonlib = pycompat.sysexecutable
1601 pythonlib = pycompat.sysexecutable
1602
1602
1603 fm.write(
1603 fm.write(
1604 b'pythonexe',
1604 b'pythonexe',
1605 _(b"checking Python executable (%s)\n"),
1605 _(b"checking Python executable (%s)\n"),
1606 pycompat.sysexecutable or _(b"unknown"),
1606 pycompat.sysexecutable or _(b"unknown"),
1607 )
1607 )
1608 fm.write(
1608 fm.write(
1609 b'pythonimplementation',
1609 b'pythonimplementation',
1610 _(b"checking Python implementation (%s)\n"),
1610 _(b"checking Python implementation (%s)\n"),
1611 pycompat.sysbytes(platform.python_implementation()),
1611 pycompat.sysbytes(platform.python_implementation()),
1612 )
1612 )
1613 fm.write(
1613 fm.write(
1614 b'pythonver',
1614 b'pythonver',
1615 _(b"checking Python version (%s)\n"),
1615 _(b"checking Python version (%s)\n"),
1616 (b"%d.%d.%d" % sys.version_info[:3]),
1616 (b"%d.%d.%d" % sys.version_info[:3]),
1617 )
1617 )
1618 fm.write(
1618 fm.write(
1619 b'pythonlib',
1619 b'pythonlib',
1620 _(b"checking Python lib (%s)...\n"),
1620 _(b"checking Python lib (%s)...\n"),
1621 pythonlib or _(b"unknown"),
1621 pythonlib or _(b"unknown"),
1622 )
1622 )
1623
1623
1624 try:
1624 try:
1625 from . import rustext
1625 from . import rustext
1626
1626
1627 rustext.__doc__ # trigger lazy import
1627 rustext.__doc__ # trigger lazy import
1628 except ImportError:
1628 except ImportError:
1629 rustext = None
1629 rustext = None
1630
1630
1631 security = set(sslutil.supportedprotocols)
1631 security = set(sslutil.supportedprotocols)
1632 if sslutil.hassni:
1632 if sslutil.hassni:
1633 security.add(b'sni')
1633 security.add(b'sni')
1634
1634
1635 fm.write(
1635 fm.write(
1636 b'pythonsecurity',
1636 b'pythonsecurity',
1637 _(b"checking Python security support (%s)\n"),
1637 _(b"checking Python security support (%s)\n"),
1638 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1638 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1639 )
1639 )
1640
1640
1641 # These are warnings, not errors. So don't increment problem count. This
1641 # These are warnings, not errors. So don't increment problem count. This
1642 # may change in the future.
1642 # may change in the future.
1643 if b'tls1.2' not in security:
1643 if b'tls1.2' not in security:
1644 fm.plain(
1644 fm.plain(
1645 _(
1645 _(
1646 b' TLS 1.2 not supported by Python install; '
1646 b' TLS 1.2 not supported by Python install; '
1647 b'network connections lack modern security\n'
1647 b'network connections lack modern security\n'
1648 )
1648 )
1649 )
1649 )
1650 if b'sni' not in security:
1650 if b'sni' not in security:
1651 fm.plain(
1651 fm.plain(
1652 _(
1652 _(
1653 b' SNI not supported by Python install; may have '
1653 b' SNI not supported by Python install; may have '
1654 b'connectivity issues with some servers\n'
1654 b'connectivity issues with some servers\n'
1655 )
1655 )
1656 )
1656 )
1657
1657
1658 fm.plain(
1658 fm.plain(
1659 _(
1659 _(
1660 b"checking Rust extensions (%s)\n"
1660 b"checking Rust extensions (%s)\n"
1661 % (b'missing' if rustext is None else b'installed')
1661 % (b'missing' if rustext is None else b'installed')
1662 ),
1662 ),
1663 )
1663 )
1664
1664
1665 # TODO print CA cert info
1665 # TODO print CA cert info
1666
1666
1667 # hg version
1667 # hg version
1668 hgver = util.version()
1668 hgver = util.version()
1669 fm.write(
1669 fm.write(
1670 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1670 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1671 )
1671 )
1672 fm.write(
1672 fm.write(
1673 b'hgverextra',
1673 b'hgverextra',
1674 _(b"checking Mercurial custom build (%s)\n"),
1674 _(b"checking Mercurial custom build (%s)\n"),
1675 b'+'.join(hgver.split(b'+')[1:]),
1675 b'+'.join(hgver.split(b'+')[1:]),
1676 )
1676 )
1677
1677
1678 # compiled modules
1678 # compiled modules
1679 hgmodules = None
1679 hgmodules = None
1680 if util.safehasattr(sys.modules[__name__], '__file__'):
1680 if util.safehasattr(sys.modules[__name__], '__file__'):
1681 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1681 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1682 elif getattr(sys, 'oxidized', False):
1682 elif getattr(sys, 'oxidized', False):
1683 hgmodules = pycompat.sysexecutable
1683 hgmodules = pycompat.sysexecutable
1684
1684
1685 fm.write(
1685 fm.write(
1686 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1686 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1687 )
1687 )
1688 fm.write(
1688 fm.write(
1689 b'hgmodules',
1689 b'hgmodules',
1690 _(b"checking installed modules (%s)...\n"),
1690 _(b"checking installed modules (%s)...\n"),
1691 hgmodules or _(b"unknown"),
1691 hgmodules or _(b"unknown"),
1692 )
1692 )
1693
1693
1694 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1694 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1695 rustext = rustandc # for now, that's the only case
1695 rustext = rustandc # for now, that's the only case
1696 cext = policy.policy in (b'c', b'allow') or rustandc
1696 cext = policy.policy in (b'c', b'allow') or rustandc
1697 nopure = cext or rustext
1697 nopure = cext or rustext
1698 if nopure:
1698 if nopure:
1699 err = None
1699 err = None
1700 try:
1700 try:
1701 if cext:
1701 if cext:
1702 from .cext import ( # pytype: disable=import-error
1702 from .cext import ( # pytype: disable=import-error
1703 base85,
1703 base85,
1704 bdiff,
1704 bdiff,
1705 mpatch,
1705 mpatch,
1706 osutil,
1706 osutil,
1707 )
1707 )
1708
1708
1709 # quiet pyflakes
1709 # quiet pyflakes
1710 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1710 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1711 if rustext:
1711 if rustext:
1712 from .rustext import ( # pytype: disable=import-error
1712 from .rustext import ( # pytype: disable=import-error
1713 ancestor,
1713 ancestor,
1714 dirstate,
1714 dirstate,
1715 )
1715 )
1716
1716
1717 dir(ancestor), dir(dirstate) # quiet pyflakes
1717 dir(ancestor), dir(dirstate) # quiet pyflakes
1718 except Exception as inst:
1718 except Exception as inst:
1719 err = stringutil.forcebytestr(inst)
1719 err = stringutil.forcebytestr(inst)
1720 problems += 1
1720 problems += 1
1721 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1721 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1722
1722
1723 compengines = util.compengines._engines.values()
1723 compengines = util.compengines._engines.values()
1724 fm.write(
1724 fm.write(
1725 b'compengines',
1725 b'compengines',
1726 _(b'checking registered compression engines (%s)\n'),
1726 _(b'checking registered compression engines (%s)\n'),
1727 fm.formatlist(
1727 fm.formatlist(
1728 sorted(e.name() for e in compengines),
1728 sorted(e.name() for e in compengines),
1729 name=b'compengine',
1729 name=b'compengine',
1730 fmt=b'%s',
1730 fmt=b'%s',
1731 sep=b', ',
1731 sep=b', ',
1732 ),
1732 ),
1733 )
1733 )
1734 fm.write(
1734 fm.write(
1735 b'compenginesavail',
1735 b'compenginesavail',
1736 _(b'checking available compression engines (%s)\n'),
1736 _(b'checking available compression engines (%s)\n'),
1737 fm.formatlist(
1737 fm.formatlist(
1738 sorted(e.name() for e in compengines if e.available()),
1738 sorted(e.name() for e in compengines if e.available()),
1739 name=b'compengine',
1739 name=b'compengine',
1740 fmt=b'%s',
1740 fmt=b'%s',
1741 sep=b', ',
1741 sep=b', ',
1742 ),
1742 ),
1743 )
1743 )
1744 wirecompengines = compression.compengines.supportedwireengines(
1744 wirecompengines = compression.compengines.supportedwireengines(
1745 compression.SERVERROLE
1745 compression.SERVERROLE
1746 )
1746 )
1747 fm.write(
1747 fm.write(
1748 b'compenginesserver',
1748 b'compenginesserver',
1749 _(
1749 _(
1750 b'checking available compression engines '
1750 b'checking available compression engines '
1751 b'for wire protocol (%s)\n'
1751 b'for wire protocol (%s)\n'
1752 ),
1752 ),
1753 fm.formatlist(
1753 fm.formatlist(
1754 [e.name() for e in wirecompengines if e.wireprotosupport()],
1754 [e.name() for e in wirecompengines if e.wireprotosupport()],
1755 name=b'compengine',
1755 name=b'compengine',
1756 fmt=b'%s',
1756 fmt=b'%s',
1757 sep=b', ',
1757 sep=b', ',
1758 ),
1758 ),
1759 )
1759 )
1760 re2 = b'missing'
1760 re2 = b'missing'
1761 if util._re2:
1761 if util._re2:
1762 re2 = b'available'
1762 re2 = b'available'
1763 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1763 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1764 fm.data(re2=bool(util._re2))
1764 fm.data(re2=bool(util._re2))
1765
1765
1766 # templates
1766 # templates
1767 p = templater.templatedir()
1767 p = templater.templatedir()
1768 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1768 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1769 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1769 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1770 if p:
1770 if p:
1771 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1771 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1772 if m:
1772 if m:
1773 # template found, check if it is working
1773 # template found, check if it is working
1774 err = None
1774 err = None
1775 try:
1775 try:
1776 templater.templater.frommapfile(m)
1776 templater.templater.frommapfile(m)
1777 except Exception as inst:
1777 except Exception as inst:
1778 err = stringutil.forcebytestr(inst)
1778 err = stringutil.forcebytestr(inst)
1779 p = None
1779 p = None
1780 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1780 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1781 else:
1781 else:
1782 p = None
1782 p = None
1783 fm.condwrite(
1783 fm.condwrite(
1784 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1784 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1785 )
1785 )
1786 fm.condwrite(
1786 fm.condwrite(
1787 not m,
1787 not m,
1788 b'defaulttemplatenotfound',
1788 b'defaulttemplatenotfound',
1789 _(b" template '%s' not found\n"),
1789 _(b" template '%s' not found\n"),
1790 b"default",
1790 b"default",
1791 )
1791 )
1792 if not p:
1792 if not p:
1793 problems += 1
1793 problems += 1
1794 fm.condwrite(
1794 fm.condwrite(
1795 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1795 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1796 )
1796 )
1797
1797
1798 # editor
1798 # editor
1799 editor = ui.geteditor()
1799 editor = ui.geteditor()
1800 editor = util.expandpath(editor)
1800 editor = util.expandpath(editor)
1801 editorbin = procutil.shellsplit(editor)[0]
1801 editorbin = procutil.shellsplit(editor)[0]
1802 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1802 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1803 cmdpath = procutil.findexe(editorbin)
1803 cmdpath = procutil.findexe(editorbin)
1804 fm.condwrite(
1804 fm.condwrite(
1805 not cmdpath and editor == b'vi',
1805 not cmdpath and editor == b'vi',
1806 b'vinotfound',
1806 b'vinotfound',
1807 _(
1807 _(
1808 b" No commit editor set and can't find %s in PATH\n"
1808 b" No commit editor set and can't find %s in PATH\n"
1809 b" (specify a commit editor in your configuration"
1809 b" (specify a commit editor in your configuration"
1810 b" file)\n"
1810 b" file)\n"
1811 ),
1811 ),
1812 not cmdpath and editor == b'vi' and editorbin,
1812 not cmdpath and editor == b'vi' and editorbin,
1813 )
1813 )
1814 fm.condwrite(
1814 fm.condwrite(
1815 not cmdpath and editor != b'vi',
1815 not cmdpath and editor != b'vi',
1816 b'editornotfound',
1816 b'editornotfound',
1817 _(
1817 _(
1818 b" Can't find editor '%s' in PATH\n"
1818 b" Can't find editor '%s' in PATH\n"
1819 b" (specify a commit editor in your configuration"
1819 b" (specify a commit editor in your configuration"
1820 b" file)\n"
1820 b" file)\n"
1821 ),
1821 ),
1822 not cmdpath and editorbin,
1822 not cmdpath and editorbin,
1823 )
1823 )
1824 if not cmdpath and editor != b'vi':
1824 if not cmdpath and editor != b'vi':
1825 problems += 1
1825 problems += 1
1826
1826
1827 # check username
1827 # check username
1828 username = None
1828 username = None
1829 err = None
1829 err = None
1830 try:
1830 try:
1831 username = ui.username()
1831 username = ui.username()
1832 except error.Abort as e:
1832 except error.Abort as e:
1833 err = e.message
1833 err = e.message
1834 problems += 1
1834 problems += 1
1835
1835
1836 fm.condwrite(
1836 fm.condwrite(
1837 username, b'username', _(b"checking username (%s)\n"), username
1837 username, b'username', _(b"checking username (%s)\n"), username
1838 )
1838 )
1839 fm.condwrite(
1839 fm.condwrite(
1840 err,
1840 err,
1841 b'usernameerror',
1841 b'usernameerror',
1842 _(
1842 _(
1843 b"checking username...\n %s\n"
1843 b"checking username...\n %s\n"
1844 b" (specify a username in your configuration file)\n"
1844 b" (specify a username in your configuration file)\n"
1845 ),
1845 ),
1846 err,
1846 err,
1847 )
1847 )
1848
1848
1849 for name, mod in extensions.extensions():
1849 for name, mod in extensions.extensions():
1850 handler = getattr(mod, 'debuginstall', None)
1850 handler = getattr(mod, 'debuginstall', None)
1851 if handler is not None:
1851 if handler is not None:
1852 problems += handler(ui, fm)
1852 problems += handler(ui, fm)
1853
1853
1854 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1854 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1855 if not problems:
1855 if not problems:
1856 fm.data(problems=problems)
1856 fm.data(problems=problems)
1857 fm.condwrite(
1857 fm.condwrite(
1858 problems,
1858 problems,
1859 b'problems',
1859 b'problems',
1860 _(b"%d problems detected, please check your install!\n"),
1860 _(b"%d problems detected, please check your install!\n"),
1861 problems,
1861 problems,
1862 )
1862 )
1863 fm.end()
1863 fm.end()
1864
1864
1865 return problems
1865 return problems
1866
1866
1867
1867
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer so this also works against remote repos.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query all ids in one round trip; each answer is a boolean.
    answers = peer.known([bin(node_id) for node_id in ids])
    digits = b"".join(b"1" if known else b"0" for known in answers)
    ui.write(b"%s\n" % digits)
1881
1881
1882
1882
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias kept for old completion scripts: the actual listing of
    # tags, open branch names and bookmarks lives in debugnamecomplete().
    debugnamecomplete(ui, repo, *args)
1887
1887
1888
1888
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: unconditionally delete the lock file(s) without
    # checking who holds them. DANGEROUS, as documented above.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: take the requested lock(s) non-blocking, then park on a
    # prompt until interrupted; the finally clause guarantees release.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode (default): describe the current holder of each lock.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the named lock is held, 0 if it is free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock, so nobody else held it; drop it.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; only translate the pid
                    # into a friendly form when it is a local process.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between the probe and the
                # stat -- treat it as free; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2000
2000
2001
2001
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog
        # manifest storage; abort gracefully when the active storage
        # backend does not provide one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    # error.Abort expects bytes, so convert the exception
                    # before wrapping it.
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No mutation options given: dump the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2073
2075
2074
2076
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        # Report which on-disk format will actually be used (v2 is
        # preferred, but only when it agrees with v1).
        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template mirrors the structured output assembled below
        # (commits, per-file records, and leftover extras).
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two sides of the merge, with their optional user-visible labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the tuple layout of ms._state[f] depends on
    # the record type, hence the two branches below.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2182
2184
2183
2185
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branches are handled separately below because historically only
    # *open* branches were listed; every other namespace is taken whole.
    for ns_name, ns in pycompat.iteritems(repo.names):
        if ns_name != b'branches':
            candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # No arguments means "complete everything" (empty prefix).
    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2206
2208
2207
2209
2208 @command(
2210 @command(
2209 b'debugnodemap',
2211 b'debugnodemap',
2210 [
2212 [
2211 (
2213 (
2212 b'',
2214 b'',
2213 b'dump-new',
2215 b'dump-new',
2214 False,
2216 False,
2215 _(b'write a (new) persistent binary nodemap on stdin'),
2217 _(b'write a (new) persistent binary nodemap on stdin'),
2216 ),
2218 ),
2217 (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
2219 (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
2218 (
2220 (
2219 b'',
2221 b'',
2220 b'check',
2222 b'check',
2221 False,
2223 False,
2222 _(b'check that the data on disk data are correct.'),
2224 _(b'check that the data on disk data are correct.'),
2223 ),
2225 ),
2224 (
2226 (
2225 b'',
2227 b'',
2226 b'metadata',
2228 b'metadata',
2227 False,
2229 False,
2228 _(b'display the on disk meta data for the nodemap'),
2230 _(b'display the on disk meta data for the nodemap'),
2229 ),
2231 ),
2230 ],
2232 ],
2231 )
2233 )
2232 def debugnodemap(ui, repo, **opts):
2234 def debugnodemap(ui, repo, **opts):
2233 """write and inspect on disk nodemap"""
2235 """write and inspect on disk nodemap"""
2234 if opts['dump_new']:
2236 if opts['dump_new']:
2235 unfi = repo.unfiltered()
2237 unfi = repo.unfiltered()
2236 cl = unfi.changelog
2238 cl = unfi.changelog
2237 if util.safehasattr(cl.index, "nodemap_data_all"):
2239 if util.safehasattr(cl.index, "nodemap_data_all"):
2238 data = cl.index.nodemap_data_all()
2240 data = cl.index.nodemap_data_all()
2239 else:
2241 else:
2240 data = nodemap.persistent_data(cl.index)
2242 data = nodemap.persistent_data(cl.index)
2241 ui.write(data)
2243 ui.write(data)
2242 elif opts['dump_disk']:
2244 elif opts['dump_disk']:
2243 unfi = repo.unfiltered()
2245 unfi = repo.unfiltered()
2244 cl = unfi.changelog
2246 cl = unfi.changelog
2245 nm_data = nodemap.persisted_data(cl)
2247 nm_data = nodemap.persisted_data(cl)
2246 if nm_data is not None:
2248 if nm_data is not None:
2247 docket, data = nm_data
2249 docket, data = nm_data
2248 ui.write(data[:])
2250 ui.write(data[:])
2249 elif opts['check']:
2251 elif opts['check']:
2250 unfi = repo.unfiltered()
2252 unfi = repo.unfiltered()
2251 cl = unfi.changelog
2253 cl = unfi.changelog
2252 nm_data = nodemap.persisted_data(cl)
2254 nm_data = nodemap.persisted_data(cl)
2253 if nm_data is not None:
2255 if nm_data is not None:
2254 docket, data = nm_data
2256 docket, data = nm_data
2255 return nodemap.check_data(ui, cl.index, data)
2257 return nodemap.check_data(ui, cl.index, data)
2256 elif opts['metadata']:
2258 elif opts['metadata']:
2257 unfi = repo.unfiltered()
2259 unfi = repo.unfiltered()
2258 cl = unfi.changelog
2260 cl = unfi.changelog
2259 nm_data = nodemap.persisted_data(cl)
2261 nm_data = nodemap.persisted_data(cl)
2260 if nm_data is not None:
2262 if nm_data is not None:
2261 docket, data = nm_data
2263 docket, data = nm_data
2262 ui.write((b"uid: %s\n") % docket.uid)
2264 ui.write((b"uid: %s\n") % docket.uid)
2263 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2265 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2264 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2266 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2265 ui.write((b"data-length: %d\n") % docket.data_length)
2267 ui.write((b"data-length: %d\n") % docket.data_length)
2266 ui.write((b"data-unused: %d\n") % docket.data_unused)
2268 ui.write((b"data-unused: %d\n") % docket.data_unused)
2267 unused_perc = docket.data_unused * 100.0 / docket.data_length
2269 unused_perc = docket.data_unused * 100.0 / docket.data_length
2268 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2270 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2269
2271
2270
2272
2271 @command(
2273 @command(
2272 b'debugobsolete',
2274 b'debugobsolete',
2273 [
2275 [
2274 (b'', b'flags', 0, _(b'markers flag')),
2276 (b'', b'flags', 0, _(b'markers flag')),
2275 (
2277 (
2276 b'',
2278 b'',
2277 b'record-parents',
2279 b'record-parents',
2278 False,
2280 False,
2279 _(b'record parent information for the precursor'),
2281 _(b'record parent information for the precursor'),
2280 ),
2282 ),
2281 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2283 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2282 (
2284 (
2283 b'',
2285 b'',
2284 b'exclusive',
2286 b'exclusive',
2285 False,
2287 False,
2286 _(b'restrict display to markers only relevant to REV'),
2288 _(b'restrict display to markers only relevant to REV'),
2287 ),
2289 ),
2288 (b'', b'index', False, _(b'display index of the marker')),
2290 (b'', b'index', False, _(b'display index of the marker')),
2289 (b'', b'delete', [], _(b'delete markers specified by indices')),
2291 (b'', b'delete', [], _(b'delete markers specified by indices')),
2290 ]
2292 ]
2291 + cmdutil.commitopts2
2293 + cmdutil.commitopts2
2292 + cmdutil.formatteropts,
2294 + cmdutil.formatteropts,
2293 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2295 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2294 )
2296 )
2295 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2297 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2296 """create arbitrary obsolete marker
2298 """create arbitrary obsolete marker
2297
2299
2298 With no arguments, displays the list of obsolescence markers."""
2300 With no arguments, displays the list of obsolescence markers."""
2299
2301
2300 opts = pycompat.byteskwargs(opts)
2302 opts = pycompat.byteskwargs(opts)
2301
2303
2302 def parsenodeid(s):
2304 def parsenodeid(s):
2303 try:
2305 try:
2304 # We do not use revsingle/revrange functions here to accept
2306 # We do not use revsingle/revrange functions here to accept
2305 # arbitrary node identifiers, possibly not present in the
2307 # arbitrary node identifiers, possibly not present in the
2306 # local repository.
2308 # local repository.
2307 n = bin(s)
2309 n = bin(s)
2308 if len(n) != len(nullid):
2310 if len(n) != len(nullid):
2309 raise TypeError()
2311 raise TypeError()
2310 return n
2312 return n
2311 except TypeError:
2313 except TypeError:
2312 raise error.InputError(
2314 raise error.InputError(
2313 b'changeset references must be full hexadecimal '
2315 b'changeset references must be full hexadecimal '
2314 b'node identifiers'
2316 b'node identifiers'
2315 )
2317 )
2316
2318
2317 if opts.get(b'delete'):
2319 if opts.get(b'delete'):
2318 indices = []
2320 indices = []
2319 for v in opts.get(b'delete'):
2321 for v in opts.get(b'delete'):
2320 try:
2322 try:
2321 indices.append(int(v))
2323 indices.append(int(v))
2322 except ValueError:
2324 except ValueError:
2323 raise error.InputError(
2325 raise error.InputError(
2324 _(b'invalid index value: %r') % v,
2326 _(b'invalid index value: %r') % v,
2325 hint=_(b'use integers for indices'),
2327 hint=_(b'use integers for indices'),
2326 )
2328 )
2327
2329
2328 if repo.currenttransaction():
2330 if repo.currenttransaction():
2329 raise error.Abort(
2331 raise error.Abort(
2330 _(b'cannot delete obsmarkers in the middle of transaction.')
2332 _(b'cannot delete obsmarkers in the middle of transaction.')
2331 )
2333 )
2332
2334
2333 with repo.lock():
2335 with repo.lock():
2334 n = repair.deleteobsmarkers(repo.obsstore, indices)
2336 n = repair.deleteobsmarkers(repo.obsstore, indices)
2335 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2337 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2336
2338
2337 return
2339 return
2338
2340
2339 if precursor is not None:
2341 if precursor is not None:
2340 if opts[b'rev']:
2342 if opts[b'rev']:
2341 raise error.InputError(
2343 raise error.InputError(
2342 b'cannot select revision when creating marker'
2344 b'cannot select revision when creating marker'
2343 )
2345 )
2344 metadata = {}
2346 metadata = {}
2345 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2347 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2346 succs = tuple(parsenodeid(succ) for succ in successors)
2348 succs = tuple(parsenodeid(succ) for succ in successors)
2347 l = repo.lock()
2349 l = repo.lock()
2348 try:
2350 try:
2349 tr = repo.transaction(b'debugobsolete')
2351 tr = repo.transaction(b'debugobsolete')
2350 try:
2352 try:
2351 date = opts.get(b'date')
2353 date = opts.get(b'date')
2352 if date:
2354 if date:
2353 date = dateutil.parsedate(date)
2355 date = dateutil.parsedate(date)
2354 else:
2356 else:
2355 date = None
2357 date = None
2356 prec = parsenodeid(precursor)
2358 prec = parsenodeid(precursor)
2357 parents = None
2359 parents = None
2358 if opts[b'record_parents']:
2360 if opts[b'record_parents']:
2359 if prec not in repo.unfiltered():
2361 if prec not in repo.unfiltered():
2360 raise error.Abort(
2362 raise error.Abort(
2361 b'cannot used --record-parents on '
2363 b'cannot used --record-parents on '
2362 b'unknown changesets'
2364 b'unknown changesets'
2363 )
2365 )
2364 parents = repo.unfiltered()[prec].parents()
2366 parents = repo.unfiltered()[prec].parents()
2365 parents = tuple(p.node() for p in parents)
2367 parents = tuple(p.node() for p in parents)
2366 repo.obsstore.create(
2368 repo.obsstore.create(
2367 tr,
2369 tr,
2368 prec,
2370 prec,
2369 succs,
2371 succs,
2370 opts[b'flags'],
2372 opts[b'flags'],
2371 parents=parents,
2373 parents=parents,
2372 date=date,
2374 date=date,
2373 metadata=metadata,
2375 metadata=metadata,
2374 ui=ui,
2376 ui=ui,
2375 )
2377 )
2376 tr.close()
2378 tr.close()
2377 except ValueError as exc:
2379 except ValueError as exc:
2378 raise error.Abort(
2380 raise error.Abort(
2379 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2381 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2380 )
2382 )
2381 finally:
2383 finally:
2382 tr.release()
2384 tr.release()
2383 finally:
2385 finally:
2384 l.release()
2386 l.release()
2385 else:
2387 else:
2386 if opts[b'rev']:
2388 if opts[b'rev']:
2387 revs = scmutil.revrange(repo, opts[b'rev'])
2389 revs = scmutil.revrange(repo, opts[b'rev'])
2388 nodes = [repo[r].node() for r in revs]
2390 nodes = [repo[r].node() for r in revs]
2389 markers = list(
2391 markers = list(
2390 obsutil.getmarkers(
2392 obsutil.getmarkers(
2391 repo, nodes=nodes, exclusive=opts[b'exclusive']
2393 repo, nodes=nodes, exclusive=opts[b'exclusive']
2392 )
2394 )
2393 )
2395 )
2394 markers.sort(key=lambda x: x._data)
2396 markers.sort(key=lambda x: x._data)
2395 else:
2397 else:
2396 markers = obsutil.getmarkers(repo)
2398 markers = obsutil.getmarkers(repo)
2397
2399
2398 markerstoiter = markers
2400 markerstoiter = markers
2399 isrelevant = lambda m: True
2401 isrelevant = lambda m: True
2400 if opts.get(b'rev') and opts.get(b'index'):
2402 if opts.get(b'rev') and opts.get(b'index'):
2401 markerstoiter = obsutil.getmarkers(repo)
2403 markerstoiter = obsutil.getmarkers(repo)
2402 markerset = set(markers)
2404 markerset = set(markers)
2403 isrelevant = lambda m: m in markerset
2405 isrelevant = lambda m: m in markerset
2404
2406
2405 fm = ui.formatter(b'debugobsolete', opts)
2407 fm = ui.formatter(b'debugobsolete', opts)
2406 for i, m in enumerate(markerstoiter):
2408 for i, m in enumerate(markerstoiter):
2407 if not isrelevant(m):
2409 if not isrelevant(m):
2408 # marker can be irrelevant when we're iterating over a set
2410 # marker can be irrelevant when we're iterating over a set
2409 # of markers (markerstoiter) which is bigger than the set
2411 # of markers (markerstoiter) which is bigger than the set
2410 # of markers we want to display (markers)
2412 # of markers we want to display (markers)
2411 # this can happen if both --index and --rev options are
2413 # this can happen if both --index and --rev options are
2412 # provided and thus we need to iterate over all of the markers
2414 # provided and thus we need to iterate over all of the markers
2413 # to get the correct indices, but only display the ones that
2415 # to get the correct indices, but only display the ones that
2414 # are relevant to --rev value
2416 # are relevant to --rev value
2415 continue
2417 continue
2416 fm.startitem()
2418 fm.startitem()
2417 ind = i if opts.get(b'index') else None
2419 ind = i if opts.get(b'index') else None
2418 cmdutil.showmarker(fm, m, index=ind)
2420 cmdutil.showmarker(fm, m, index=ind)
2419 fm.end()
2421 fm.end()
2420
2422
2421
2423
2422 @command(
2424 @command(
2423 b'debugp1copies',
2425 b'debugp1copies',
2424 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2426 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2425 _(b'[-r REV]'),
2427 _(b'[-r REV]'),
2426 )
2428 )
2427 def debugp1copies(ui, repo, **opts):
2429 def debugp1copies(ui, repo, **opts):
2428 """dump copy information compared to p1"""
2430 """dump copy information compared to p1"""
2429
2431
2430 opts = pycompat.byteskwargs(opts)
2432 opts = pycompat.byteskwargs(opts)
2431 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2433 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2432 for dst, src in ctx.p1copies().items():
2434 for dst, src in ctx.p1copies().items():
2433 ui.write(b'%s -> %s\n' % (src, dst))
2435 ui.write(b'%s -> %s\n' % (src, dst))
2434
2436
2435
2437
2436 @command(
2438 @command(
2437 b'debugp2copies',
2439 b'debugp2copies',
2438 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2440 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2439 _(b'[-r REV]'),
2441 _(b'[-r REV]'),
2440 )
2442 )
2441 def debugp1copies(ui, repo, **opts):
2443 def debugp1copies(ui, repo, **opts):
2442 """dump copy information compared to p2"""
2444 """dump copy information compared to p2"""
2443
2445
2444 opts = pycompat.byteskwargs(opts)
2446 opts = pycompat.byteskwargs(opts)
2445 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2447 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2446 for dst, src in ctx.p2copies().items():
2448 for dst, src in ctx.p2copies().items():
2447 ui.write(b'%s -> %s\n' % (src, dst))
2449 ui.write(b'%s -> %s\n' % (src, dst))
2448
2450
2449
2451
2450 @command(
2452 @command(
2451 b'debugpathcomplete',
2453 b'debugpathcomplete',
2452 [
2454 [
2453 (b'f', b'full', None, _(b'complete an entire path')),
2455 (b'f', b'full', None, _(b'complete an entire path')),
2454 (b'n', b'normal', None, _(b'show only normal files')),
2456 (b'n', b'normal', None, _(b'show only normal files')),
2455 (b'a', b'added', None, _(b'show only added files')),
2457 (b'a', b'added', None, _(b'show only added files')),
2456 (b'r', b'removed', None, _(b'show only removed files')),
2458 (b'r', b'removed', None, _(b'show only removed files')),
2457 ],
2459 ],
2458 _(b'FILESPEC...'),
2460 _(b'FILESPEC...'),
2459 )
2461 )
2460 def debugpathcomplete(ui, repo, *specs, **opts):
2462 def debugpathcomplete(ui, repo, *specs, **opts):
2461 """complete part or all of a tracked path
2463 """complete part or all of a tracked path
2462
2464
2463 This command supports shells that offer path name completion. It
2465 This command supports shells that offer path name completion. It
2464 currently completes only files already known to the dirstate.
2466 currently completes only files already known to the dirstate.
2465
2467
2466 Completion extends only to the next path segment unless
2468 Completion extends only to the next path segment unless
2467 --full is specified, in which case entire paths are used."""
2469 --full is specified, in which case entire paths are used."""
2468
2470
2469 def complete(path, acceptable):
2471 def complete(path, acceptable):
2470 dirstate = repo.dirstate
2472 dirstate = repo.dirstate
2471 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2473 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2472 rootdir = repo.root + pycompat.ossep
2474 rootdir = repo.root + pycompat.ossep
2473 if spec != repo.root and not spec.startswith(rootdir):
2475 if spec != repo.root and not spec.startswith(rootdir):
2474 return [], []
2476 return [], []
2475 if os.path.isdir(spec):
2477 if os.path.isdir(spec):
2476 spec += b'/'
2478 spec += b'/'
2477 spec = spec[len(rootdir) :]
2479 spec = spec[len(rootdir) :]
2478 fixpaths = pycompat.ossep != b'/'
2480 fixpaths = pycompat.ossep != b'/'
2479 if fixpaths:
2481 if fixpaths:
2480 spec = spec.replace(pycompat.ossep, b'/')
2482 spec = spec.replace(pycompat.ossep, b'/')
2481 speclen = len(spec)
2483 speclen = len(spec)
2482 fullpaths = opts['full']
2484 fullpaths = opts['full']
2483 files, dirs = set(), set()
2485 files, dirs = set(), set()
2484 adddir, addfile = dirs.add, files.add
2486 adddir, addfile = dirs.add, files.add
2485 for f, st in pycompat.iteritems(dirstate):
2487 for f, st in pycompat.iteritems(dirstate):
2486 if f.startswith(spec) and st[0] in acceptable:
2488 if f.startswith(spec) and st[0] in acceptable:
2487 if fixpaths:
2489 if fixpaths:
2488 f = f.replace(b'/', pycompat.ossep)
2490 f = f.replace(b'/', pycompat.ossep)
2489 if fullpaths:
2491 if fullpaths:
2490 addfile(f)
2492 addfile(f)
2491 continue
2493 continue
2492 s = f.find(pycompat.ossep, speclen)
2494 s = f.find(pycompat.ossep, speclen)
2493 if s >= 0:
2495 if s >= 0:
2494 adddir(f[:s])
2496 adddir(f[:s])
2495 else:
2497 else:
2496 addfile(f)
2498 addfile(f)
2497 return files, dirs
2499 return files, dirs
2498
2500
2499 acceptable = b''
2501 acceptable = b''
2500 if opts['normal']:
2502 if opts['normal']:
2501 acceptable += b'nm'
2503 acceptable += b'nm'
2502 if opts['added']:
2504 if opts['added']:
2503 acceptable += b'a'
2505 acceptable += b'a'
2504 if opts['removed']:
2506 if opts['removed']:
2505 acceptable += b'r'
2507 acceptable += b'r'
2506 cwd = repo.getcwd()
2508 cwd = repo.getcwd()
2507 if not specs:
2509 if not specs:
2508 specs = [b'.']
2510 specs = [b'.']
2509
2511
2510 files, dirs = set(), set()
2512 files, dirs = set(), set()
2511 for spec in specs:
2513 for spec in specs:
2512 f, d = complete(spec, acceptable or b'nmar')
2514 f, d = complete(spec, acceptable or b'nmar')
2513 files.update(f)
2515 files.update(f)
2514 dirs.update(d)
2516 dirs.update(d)
2515 files.update(dirs)
2517 files.update(dirs)
2516 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2518 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2517 ui.write(b'\n')
2519 ui.write(b'\n')
2518
2520
2519
2521
2520 @command(
2522 @command(
2521 b'debugpathcopies',
2523 b'debugpathcopies',
2522 cmdutil.walkopts,
2524 cmdutil.walkopts,
2523 b'hg debugpathcopies REV1 REV2 [FILE]',
2525 b'hg debugpathcopies REV1 REV2 [FILE]',
2524 inferrepo=True,
2526 inferrepo=True,
2525 )
2527 )
2526 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2528 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2527 """show copies between two revisions"""
2529 """show copies between two revisions"""
2528 ctx1 = scmutil.revsingle(repo, rev1)
2530 ctx1 = scmutil.revsingle(repo, rev1)
2529 ctx2 = scmutil.revsingle(repo, rev2)
2531 ctx2 = scmutil.revsingle(repo, rev2)
2530 m = scmutil.match(ctx1, pats, opts)
2532 m = scmutil.match(ctx1, pats, opts)
2531 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2533 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2532 ui.write(b'%s -> %s\n' % (src, dst))
2534 ui.write(b'%s -> %s\n' % (src, dst))
2533
2535
2534
2536
2535 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2537 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2536 def debugpeer(ui, path):
2538 def debugpeer(ui, path):
2537 """establish a connection to a peer repository"""
2539 """establish a connection to a peer repository"""
2538 # Always enable peer request logging. Requires --debug to display
2540 # Always enable peer request logging. Requires --debug to display
2539 # though.
2541 # though.
2540 overrides = {
2542 overrides = {
2541 (b'devel', b'debug.peer-request'): True,
2543 (b'devel', b'debug.peer-request'): True,
2542 }
2544 }
2543
2545
2544 with ui.configoverride(overrides):
2546 with ui.configoverride(overrides):
2545 peer = hg.peer(ui, {}, path)
2547 peer = hg.peer(ui, {}, path)
2546
2548
2547 local = peer.local() is not None
2549 local = peer.local() is not None
2548 canpush = peer.canpush()
2550 canpush = peer.canpush()
2549
2551
2550 ui.write(_(b'url: %s\n') % peer.url())
2552 ui.write(_(b'url: %s\n') % peer.url())
2551 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2553 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2552 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2554 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2553
2555
2554
2556
2555 @command(
2557 @command(
2556 b'debugpickmergetool',
2558 b'debugpickmergetool',
2557 [
2559 [
2558 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2560 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2559 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2561 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2560 ]
2562 ]
2561 + cmdutil.walkopts
2563 + cmdutil.walkopts
2562 + cmdutil.mergetoolopts,
2564 + cmdutil.mergetoolopts,
2563 _(b'[PATTERN]...'),
2565 _(b'[PATTERN]...'),
2564 inferrepo=True,
2566 inferrepo=True,
2565 )
2567 )
2566 def debugpickmergetool(ui, repo, *pats, **opts):
2568 def debugpickmergetool(ui, repo, *pats, **opts):
2567 """examine which merge tool is chosen for specified file
2569 """examine which merge tool is chosen for specified file
2568
2570
2569 As described in :hg:`help merge-tools`, Mercurial examines
2571 As described in :hg:`help merge-tools`, Mercurial examines
2570 configurations below in this order to decide which merge tool is
2572 configurations below in this order to decide which merge tool is
2571 chosen for specified file.
2573 chosen for specified file.
2572
2574
2573 1. ``--tool`` option
2575 1. ``--tool`` option
2574 2. ``HGMERGE`` environment variable
2576 2. ``HGMERGE`` environment variable
2575 3. configurations in ``merge-patterns`` section
2577 3. configurations in ``merge-patterns`` section
2576 4. configuration of ``ui.merge``
2578 4. configuration of ``ui.merge``
2577 5. configurations in ``merge-tools`` section
2579 5. configurations in ``merge-tools`` section
2578 6. ``hgmerge`` tool (for historical reason only)
2580 6. ``hgmerge`` tool (for historical reason only)
2579 7. default tool for fallback (``:merge`` or ``:prompt``)
2581 7. default tool for fallback (``:merge`` or ``:prompt``)
2580
2582
2581 This command writes out examination result in the style below::
2583 This command writes out examination result in the style below::
2582
2584
2583 FILE = MERGETOOL
2585 FILE = MERGETOOL
2584
2586
2585 By default, all files known in the first parent context of the
2587 By default, all files known in the first parent context of the
2586 working directory are examined. Use file patterns and/or -I/-X
2588 working directory are examined. Use file patterns and/or -I/-X
2587 options to limit target files. -r/--rev is also useful to examine
2589 options to limit target files. -r/--rev is also useful to examine
2588 files in another context without actual updating to it.
2590 files in another context without actual updating to it.
2589
2591
2590 With --debug, this command shows warning messages while matching
2592 With --debug, this command shows warning messages while matching
2591 against ``merge-patterns`` and so on, too. It is recommended to
2593 against ``merge-patterns`` and so on, too. It is recommended to
2592 use this option with explicit file patterns and/or -I/-X options,
2594 use this option with explicit file patterns and/or -I/-X options,
2593 because this option increases amount of output per file according
2595 because this option increases amount of output per file according
2594 to configurations in hgrc.
2596 to configurations in hgrc.
2595
2597
2596 With -v/--verbose, this command shows configurations below at
2598 With -v/--verbose, this command shows configurations below at
2597 first (only if specified).
2599 first (only if specified).
2598
2600
2599 - ``--tool`` option
2601 - ``--tool`` option
2600 - ``HGMERGE`` environment variable
2602 - ``HGMERGE`` environment variable
2601 - configuration of ``ui.merge``
2603 - configuration of ``ui.merge``
2602
2604
2603 If merge tool is chosen before matching against
2605 If merge tool is chosen before matching against
2604 ``merge-patterns``, this command can't show any helpful
2606 ``merge-patterns``, this command can't show any helpful
2605 information, even with --debug. In such case, information above is
2607 information, even with --debug. In such case, information above is
2606 useful to know why a merge tool is chosen.
2608 useful to know why a merge tool is chosen.
2607 """
2609 """
2608 opts = pycompat.byteskwargs(opts)
2610 opts = pycompat.byteskwargs(opts)
2609 overrides = {}
2611 overrides = {}
2610 if opts[b'tool']:
2612 if opts[b'tool']:
2611 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2613 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2612 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2614 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2613
2615
2614 with ui.configoverride(overrides, b'debugmergepatterns'):
2616 with ui.configoverride(overrides, b'debugmergepatterns'):
2615 hgmerge = encoding.environ.get(b"HGMERGE")
2617 hgmerge = encoding.environ.get(b"HGMERGE")
2616 if hgmerge is not None:
2618 if hgmerge is not None:
2617 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2619 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2618 uimerge = ui.config(b"ui", b"merge")
2620 uimerge = ui.config(b"ui", b"merge")
2619 if uimerge:
2621 if uimerge:
2620 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2622 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2621
2623
2622 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2624 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2623 m = scmutil.match(ctx, pats, opts)
2625 m = scmutil.match(ctx, pats, opts)
2624 changedelete = opts[b'changedelete']
2626 changedelete = opts[b'changedelete']
2625 for path in ctx.walk(m):
2627 for path in ctx.walk(m):
2626 fctx = ctx[path]
2628 fctx = ctx[path]
2627 try:
2629 try:
2628 if not ui.debugflag:
2630 if not ui.debugflag:
2629 ui.pushbuffer(error=True)
2631 ui.pushbuffer(error=True)
2630 tool, toolpath = filemerge._picktool(
2632 tool, toolpath = filemerge._picktool(
2631 repo,
2633 repo,
2632 ui,
2634 ui,
2633 path,
2635 path,
2634 fctx.isbinary(),
2636 fctx.isbinary(),
2635 b'l' in fctx.flags(),
2637 b'l' in fctx.flags(),
2636 changedelete,
2638 changedelete,
2637 )
2639 )
2638 finally:
2640 finally:
2639 if not ui.debugflag:
2641 if not ui.debugflag:
2640 ui.popbuffer()
2642 ui.popbuffer()
2641 ui.write(b'%s = %s\n' % (path, tool))
2643 ui.write(b'%s = %s\n' % (path, tool))
2642
2644
2643
2645
2644 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2646 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2645 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2647 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2646 """access the pushkey key/value protocol
2648 """access the pushkey key/value protocol
2647
2649
2648 With two args, list the keys in the given namespace.
2650 With two args, list the keys in the given namespace.
2649
2651
2650 With five args, set a key to new if it currently is set to old.
2652 With five args, set a key to new if it currently is set to old.
2651 Reports success or failure.
2653 Reports success or failure.
2652 """
2654 """
2653
2655
2654 target = hg.peer(ui, {}, repopath)
2656 target = hg.peer(ui, {}, repopath)
2655 if keyinfo:
2657 if keyinfo:
2656 key, old, new = keyinfo
2658 key, old, new = keyinfo
2657 with target.commandexecutor() as e:
2659 with target.commandexecutor() as e:
2658 r = e.callcommand(
2660 r = e.callcommand(
2659 b'pushkey',
2661 b'pushkey',
2660 {
2662 {
2661 b'namespace': namespace,
2663 b'namespace': namespace,
2662 b'key': key,
2664 b'key': key,
2663 b'old': old,
2665 b'old': old,
2664 b'new': new,
2666 b'new': new,
2665 },
2667 },
2666 ).result()
2668 ).result()
2667
2669
2668 ui.status(pycompat.bytestr(r) + b'\n')
2670 ui.status(pycompat.bytestr(r) + b'\n')
2669 return not r
2671 return not r
2670 else:
2672 else:
2671 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2673 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2672 ui.write(
2674 ui.write(
2673 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2675 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2674 )
2676 )
2675
2677
2676
2678
2677 @command(b'debugpvec', [], _(b'A B'))
2679 @command(b'debugpvec', [], _(b'A B'))
2678 def debugpvec(ui, repo, a, b=None):
2680 def debugpvec(ui, repo, a, b=None):
2679 ca = scmutil.revsingle(repo, a)
2681 ca = scmutil.revsingle(repo, a)
2680 cb = scmutil.revsingle(repo, b)
2682 cb = scmutil.revsingle(repo, b)
2681 pa = pvec.ctxpvec(ca)
2683 pa = pvec.ctxpvec(ca)
2682 pb = pvec.ctxpvec(cb)
2684 pb = pvec.ctxpvec(cb)
2683 if pa == pb:
2685 if pa == pb:
2684 rel = b"="
2686 rel = b"="
2685 elif pa > pb:
2687 elif pa > pb:
2686 rel = b">"
2688 rel = b">"
2687 elif pa < pb:
2689 elif pa < pb:
2688 rel = b"<"
2690 rel = b"<"
2689 elif pa | pb:
2691 elif pa | pb:
2690 rel = b"|"
2692 rel = b"|"
2691 ui.write(_(b"a: %s\n") % pa)
2693 ui.write(_(b"a: %s\n") % pa)
2692 ui.write(_(b"b: %s\n") % pb)
2694 ui.write(_(b"b: %s\n") % pb)
2693 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2695 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2694 ui.write(
2696 ui.write(
2695 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2697 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2696 % (
2698 % (
2697 abs(pa._depth - pb._depth),
2699 abs(pa._depth - pb._depth),
2698 pvec._hamming(pa._vec, pb._vec),
2700 pvec._hamming(pa._vec, pb._vec),
2699 pa.distance(pb),
2701 pa.distance(pb),
2700 rel,
2702 rel,
2701 )
2703 )
2702 )
2704 )
2703
2705
2704
2706
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # Hold the wlock for the whole rebuild so no other process mutates the
    # dirstate underneath us.
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles == None means "rebuild everything" for
        # dirstate.rebuild(); --minimal narrows it to the inconsistent set.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # In the manifest but unknown to the dirstate: must be rebuilt.
            manifestonly = manifestfiles - dirstatefiles
            # Tracked by the dirstate but absent from the manifest; entries
            # marked 'a' (added) are deliberate and left alone.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2752
2754
2753
2755
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin command wrapper: all the actual work happens in
    # repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2758
2760
2759
2761
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Walk every file matched in the chosen revision and report where (if
    # anywhere) its filelog says it was renamed from.
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            source, sourcenode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, source, hex(sourcenode))
            )
2779
2781
2780
2782
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # Emit one requirement per line, sorted so the output is deterministic.
    requirements = repo.requirements
    for name in sorted(requirements):
        ui.write(b"%s\n" % name)
2786
2788
2787
2789
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump: raw per-revision index table, no statistics.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # Stored as a full snapshot: the delta base is itself.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # A head is a revision that is no longer anyone's parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version/feature flags from the version word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks the ways the "delta" is built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored without a delta parent: a chain base (full snapshot).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Extends an existing delta chain.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot: delta against another snapshot.
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta; classify by what it deltas against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies the compression.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Turn the [min, max, total] totals into averages where a divisor exists.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates; width is filled in per-column below.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Integer format wide enough for `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "count (percent)" format wide enough for `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    # Revision-count and revision-size breakdowns.
    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk's compression marker byte.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    # Compression-type breakdown.
    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    # Delta-chain summary.
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # Size statistics were only gathered when format > 0 (see loop above).
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    # Delta-parent distribution, only meaningful when deltas exist.
    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3142
3144
3143
3145
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full hashes with --debug, short (12-char) hashes otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers; layout depends on the requested format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Broken/unresolvable entry: show null parents rather than
                # aborting the dump (this is a debugging command).
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3257
3259
3258
3260
3259 @command(
3261 @command(
3260 b'debugrevspec',
3262 b'debugrevspec',
3261 [
3263 [
3262 (
3264 (
3263 b'',
3265 b'',
3264 b'optimize',
3266 b'optimize',
3265 None,
3267 None,
3266 _(b'print parsed tree after optimizing (DEPRECATED)'),
3268 _(b'print parsed tree after optimizing (DEPRECATED)'),
3267 ),
3269 ),
3268 (
3270 (
3269 b'',
3271 b'',
3270 b'show-revs',
3272 b'show-revs',
3271 True,
3273 True,
3272 _(b'print list of result revisions (default)'),
3274 _(b'print list of result revisions (default)'),
3273 ),
3275 ),
3274 (
3276 (
3275 b's',
3277 b's',
3276 b'show-set',
3278 b'show-set',
3277 None,
3279 None,
3278 _(b'print internal representation of result set'),
3280 _(b'print internal representation of result set'),
3279 ),
3281 ),
3280 (
3282 (
3281 b'p',
3283 b'p',
3282 b'show-stage',
3284 b'show-stage',
3283 [],
3285 [],
3284 _(b'print parsed tree at the given stage'),
3286 _(b'print parsed tree at the given stage'),
3285 _(b'NAME'),
3287 _(b'NAME'),
3286 ),
3288 ),
3287 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3289 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3288 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3290 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3289 ],
3291 ],
3290 b'REVSPEC',
3292 b'REVSPEC',
3291 )
3293 )
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of (name, transform) pairs; each stage consumes the tree
    # produced by the previous one, so order matters.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # 'showalways' stages are printed unconditionally; 'showchanged' stages
    # are printed only when their tree differs from the last printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree so that
    # --verify-optimized can compare 'analyzed' against 'optimized' below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the unoptimized ('analyzed') and optimized trees
        # and diff the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render a unified-diff-like report of where the two sets disagree.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        # non-zero exit: optimized result differs (documented contract)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3390
3392
3391
3393
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally this stream would be line buffered, but line buffering in
        # binary mode is unsupported and warns on Python 3.8+. Unbuffered is
        # acceptable here since this is not performance-critical code.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # a pipe cannot seek, so append mode fails on py3; plain write
            # mode behaves the same for a pipe
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3440
3442
3441
3443
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision when omitted
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3469
3471
3470
3472
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the storage is implied, so the first positional
    # argument is actually the revision
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if not sidedata:
        return
    entries = sorted(sidedata.items())
    ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
    for key, value in entries:
        ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
        if ui.verbose:
            ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3497
3499
3498
3500
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an equivalent unverified client context explicitly. Verification
    # is intentionally disabled: we only want the peer's certificate bytes to
    # hand to the Windows chain-building API below.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes rather than a decoded dict
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # second call with build=True actually fetches the missing links
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3568
3570
3569
3571
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect all strip-backup bundles, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # neutralize bundle/force so getremotechanges below behaves like a
    # plain incoming
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` non-merge (if --no-merges) changesets from chlist
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming machinery while probing the bundle, restoring
        # the previous quiet level afterwards
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # only the first matching bundle is applied
                        break
            else:
                # listing mode: show the backup timestamp, then either the
                # bundle path (--verbose) or a one-line changeset summary
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3705
3707
3706
3708
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """print the subrepo state (path, source, revision) of a changeset"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3718
3720
3719
3721
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            # NOTE(review): newline emitted for every successors set, even an
            # empty (pruned) one — confirm intended indentation upstream
            ui.write(b'\n')
3774
3776
3775
3777
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # do not compute missing entries; report them as such instead
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3785
3787
3786
3788
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Revisions only make sense when a repository is available.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Gather -D KEY=VALUE template keyword definitions. An empty key and
    # the reserved name 'ui' are both rejected; a missing '=' raises
    # ValueError from the unpacking and is reported the same way.
    props = {}
    for spec in opts['define']:
        try:
            key, val = (part.strip() for part in spec.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)
        props[key] = val

    if ui.verbose:
        # Show the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def showsymbols(engine):
        # List the keywords and functions the template references.
        kwds, funcs = engine.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render each requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3850
3852
3851
3853
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() can return None; substitute a placeholder so the
    # %s formatting below always has bytes to work with.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3866
3868
3867
3869
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed so tests can observe the prompt result.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3880
3882
3881
3883
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-directory lock and the store lock while
    # rebuilding caches.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
3887
3889
3888
3890
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating all of the
    # actual work to the upgrade machinery.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
3938
3940
3939
3941
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    paths = list(repo[None].walk(m))
    if not paths:
        return
    # Honor ui.slash by normalizing displayed paths on platforms whose
    # native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Compute each cwd-relative path exactly once. The previous version
    # called repo.pathto() twice per file (once for the column width and
    # once for output), shadowed the builtin abs(), and built throwaway
    # lists inside max().
    relpaths = [repo.pathto(p) for p in paths]
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in paths),
        max(len(rel) for rel in relpaths),
    )
    for p, rel in zip(paths, relpaths):
        line = fmt % (p, display(rel), b'exact' if m.exact(p) else b'')
        ui.write(b"%s\n" % line.rstrip())
3966
3968
3967
3969
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Divergent nodes, when present, are rendered as a trailing-space
        # separated prefix of "hash (phase)" items.
        dnodes = b''
        if entry.get(b'divergentnodes'):
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                for ctx in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(rendered) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3985
3987
3986
3988
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Drop the connection-related options; only the test options remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually set.
    args = {k: v for k, v in pycompat.iteritems(opts) if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    first = repo.debugwireargs(*vals, **args)
    second = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
4014
4016
4015
4017
4016 def _parsewirelangblocks(fh):
4018 def _parsewirelangblocks(fh):
4017 activeaction = None
4019 activeaction = None
4018 blocklines = []
4020 blocklines = []
4019 lastindent = 0
4021 lastindent = 0
4020
4022
4021 for line in fh:
4023 for line in fh:
4022 line = line.rstrip()
4024 line = line.rstrip()
4023 if not line:
4025 if not line:
4024 continue
4026 continue
4025
4027
4026 if line.startswith(b'#'):
4028 if line.startswith(b'#'):
4027 continue
4029 continue
4028
4030
4029 if not line.startswith(b' '):
4031 if not line.startswith(b' '):
4030 # New block. Flush previous one.
4032 # New block. Flush previous one.
4031 if activeaction:
4033 if activeaction:
4032 yield activeaction, blocklines
4034 yield activeaction, blocklines
4033
4035
4034 activeaction = line
4036 activeaction = line
4035 blocklines = []
4037 blocklines = []
4036 lastindent = 0
4038 lastindent = 0
4037 continue
4039 continue
4038
4040
4039 # Else we start with an indent.
4041 # Else we start with an indent.
4040
4042
4041 if not activeaction:
4043 if not activeaction:
4042 raise error.Abort(_(b'indented line outside of block'))
4044 raise error.Abort(_(b'indented line outside of block'))
4043
4045
4044 indent = len(line) - len(line.lstrip())
4046 indent = len(line) - len(line.lstrip())
4045
4047
4046 # If this line is indented more than the last line, concatenate it.
4048 # If this line is indented more than the last line, concatenate it.
4047 if indent > lastindent and blocklines:
4049 if indent > lastindent and blocklines:
4048 blocklines[-1] += line.lstrip()
4050 blocklines[-1] += line.lstrip()
4049 else:
4051 else:
4050 blocklines.append(line)
4052 blocklines.append(line)
4051 lastindent = indent
4053 lastindent = indent
4052
4054
4053 # Flush last block.
4055 # Flush last block.
4054 if activeaction:
4056 if activeaction:
4055 yield activeaction, blocklines
4057 yield activeaction, blocklines
4056
4058
4057
4059
4058 @command(
4060 @command(
4059 b'debugwireproto',
4061 b'debugwireproto',
4060 [
4062 [
4061 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4063 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4062 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4064 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4063 (
4065 (
4064 b'',
4066 b'',
4065 b'noreadstderr',
4067 b'noreadstderr',
4066 False,
4068 False,
4067 _(b'do not read from stderr of the remote'),
4069 _(b'do not read from stderr of the remote'),
4068 ),
4070 ),
4069 (
4071 (
4070 b'',
4072 b'',
4071 b'nologhandshake',
4073 b'nologhandshake',
4072 False,
4074 False,
4073 _(b'do not log I/O related to the peer handshake'),
4075 _(b'do not log I/O related to the peer handshake'),
4074 ),
4076 ),
4075 ]
4077 ]
4076 + cmdutil.remoteopts,
4078 + cmdutil.remoteopts,
4077 _(b'[PATH]'),
4079 _(b'[PATH]'),
4078 optionalrepo=True,
4080 optionalrepo=True,
4079 )
4081 )
4080 def debugwireproto(ui, repo, path=None, **opts):
4082 def debugwireproto(ui, repo, path=None, **opts):
4081 """send wire protocol commands to a server
4083 """send wire protocol commands to a server
4082
4084
4083 This command can be used to issue wire protocol commands to remote
4085 This command can be used to issue wire protocol commands to remote
4084 peers and to debug the raw data being exchanged.
4086 peers and to debug the raw data being exchanged.
4085
4087
4086 ``--localssh`` will start an SSH server against the current repository
4088 ``--localssh`` will start an SSH server against the current repository
4087 and connect to that. By default, the connection will perform a handshake
4089 and connect to that. By default, the connection will perform a handshake
4088 and establish an appropriate peer instance.
4090 and establish an appropriate peer instance.
4089
4091
4090 ``--peer`` can be used to bypass the handshake protocol and construct a
4092 ``--peer`` can be used to bypass the handshake protocol and construct a
4091 peer instance using the specified class type. Valid values are ``raw``,
4093 peer instance using the specified class type. Valid values are ``raw``,
4092 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4094 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4093 raw data payloads and don't support higher-level command actions.
4095 raw data payloads and don't support higher-level command actions.
4094
4096
4095 ``--noreadstderr`` can be used to disable automatic reading from stderr
4097 ``--noreadstderr`` can be used to disable automatic reading from stderr
4096 of the peer (for SSH connections only). Disabling automatic reading of
4098 of the peer (for SSH connections only). Disabling automatic reading of
4097 stderr is useful for making output more deterministic.
4099 stderr is useful for making output more deterministic.
4098
4100
4099 Commands are issued via a mini language which is specified via stdin.
4101 Commands are issued via a mini language which is specified via stdin.
4100 The language consists of individual actions to perform. An action is
4102 The language consists of individual actions to perform. An action is
4101 defined by a block. A block is defined as a line with no leading
4103 defined by a block. A block is defined as a line with no leading
4102 space followed by 0 or more lines with leading space. Blocks are
4104 space followed by 0 or more lines with leading space. Blocks are
4103 effectively a high-level command with additional metadata.
4105 effectively a high-level command with additional metadata.
4104
4106
4105 Lines beginning with ``#`` are ignored.
4107 Lines beginning with ``#`` are ignored.
4106
4108
4107 The following sections denote available actions.
4109 The following sections denote available actions.
4108
4110
4109 raw
4111 raw
4110 ---
4112 ---
4111
4113
4112 Send raw data to the server.
4114 Send raw data to the server.
4113
4115
4114 The block payload contains the raw data to send as one atomic send
4116 The block payload contains the raw data to send as one atomic send
4115 operation. The data may not actually be delivered in a single system
4117 operation. The data may not actually be delivered in a single system
4116 call: it depends on the abilities of the transport being used.
4118 call: it depends on the abilities of the transport being used.
4117
4119
4118 Each line in the block is de-indented and concatenated. Then, that
4120 Each line in the block is de-indented and concatenated. Then, that
4119 value is evaluated as a Python b'' literal. This allows the use of
4121 value is evaluated as a Python b'' literal. This allows the use of
4120 backslash escaping, etc.
4122 backslash escaping, etc.
4121
4123
4122 raw+
4124 raw+
4123 ----
4125 ----
4124
4126
4125 Behaves like ``raw`` except flushes output afterwards.
4127 Behaves like ``raw`` except flushes output afterwards.
4126
4128
4127 command <X>
4129 command <X>
4128 -----------
4130 -----------
4129
4131
4130 Send a request to run a named command, whose name follows the ``command``
4132 Send a request to run a named command, whose name follows the ``command``
4131 string.
4133 string.
4132
4134
4133 Arguments to the command are defined as lines in this block. The format of
4135 Arguments to the command are defined as lines in this block. The format of
4134 each line is ``<key> <value>``. e.g.::
4136 each line is ``<key> <value>``. e.g.::
4135
4137
4136 command listkeys
4138 command listkeys
4137 namespace bookmarks
4139 namespace bookmarks
4138
4140
4139 If the value begins with ``eval:``, it will be interpreted as a Python
4141 If the value begins with ``eval:``, it will be interpreted as a Python
4140 literal expression. Otherwise values are interpreted as Python b'' literals.
4142 literal expression. Otherwise values are interpreted as Python b'' literals.
4141 This allows sending complex types and encoding special byte sequences via
4143 This allows sending complex types and encoding special byte sequences via
4142 backslash escaping.
4144 backslash escaping.
4143
4145
4144 The following arguments have special meaning:
4146 The following arguments have special meaning:
4145
4147
4146 ``PUSHFILE``
4148 ``PUSHFILE``
4147 When defined, the *push* mechanism of the peer will be used instead
4149 When defined, the *push* mechanism of the peer will be used instead
4148 of the static request-response mechanism and the content of the
4150 of the static request-response mechanism and the content of the
4149 file specified in the value of this argument will be sent as the
4151 file specified in the value of this argument will be sent as the
4150 command payload.
4152 command payload.
4151
4153
4152 This can be used to submit a local bundle file to the remote.
4154 This can be used to submit a local bundle file to the remote.
4153
4155
4154 batchbegin
4156 batchbegin
4155 ----------
4157 ----------
4156
4158
4157 Instruct the peer to begin a batched send.
4159 Instruct the peer to begin a batched send.
4158
4160
4159 All ``command`` blocks are queued for execution until the next
4161 All ``command`` blocks are queued for execution until the next
4160 ``batchsubmit`` block.
4162 ``batchsubmit`` block.
4161
4163
4162 batchsubmit
4164 batchsubmit
4163 -----------
4165 -----------
4164
4166
4165 Submit previously queued ``command`` blocks as a batch request.
4167 Submit previously queued ``command`` blocks as a batch request.
4166
4168
4167 This action MUST be paired with a ``batchbegin`` action.
4169 This action MUST be paired with a ``batchbegin`` action.
4168
4170
4169 httprequest <method> <path>
4171 httprequest <method> <path>
4170 ---------------------------
4172 ---------------------------
4171
4173
4172 (HTTP peer only)
4174 (HTTP peer only)
4173
4175
4174 Send an HTTP request to the peer.
4176 Send an HTTP request to the peer.
4175
4177
4176 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4178 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4177
4179
4178 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4180 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4179 headers to add to the request. e.g. ``Accept: foo``.
4181 headers to add to the request. e.g. ``Accept: foo``.
4180
4182
4181 The following arguments are special:
4183 The following arguments are special:
4182
4184
4183 ``BODYFILE``
4185 ``BODYFILE``
4184 The content of the file defined as the value to this argument will be
4186 The content of the file defined as the value to this argument will be
4185 transferred verbatim as the HTTP request body.
4187 transferred verbatim as the HTTP request body.
4186
4188
4187 ``frame <type> <flags> <payload>``
4189 ``frame <type> <flags> <payload>``
4188 Send a unified protocol frame as part of the request body.
4190 Send a unified protocol frame as part of the request body.
4189
4191
4190 All frames will be collected and sent as the body to the HTTP
4192 All frames will be collected and sent as the body to the HTTP
4191 request.
4193 request.
4192
4194
4193 close
4195 close
4194 -----
4196 -----
4195
4197
4196 Close the connection to the server.
4198 Close the connection to the server.
4197
4199
4198 flush
4200 flush
4199 -----
4201 -----
4200
4202
4201 Flush data written to the server.
4203 Flush data written to the server.
4202
4204
4203 readavailable
4205 readavailable
4204 -------------
4206 -------------
4205
4207
4206 Close the write end of the connection and read all available data from
4208 Close the write end of the connection and read all available data from
4207 the server.
4209 the server.
4208
4210
4209 If the connection to the server encompasses multiple pipes, we poll both
4211 If the connection to the server encompasses multiple pipes, we poll both
4210 pipes and read available data.
4212 pipes and read available data.
4211
4213
4212 readline
4214 readline
4213 --------
4215 --------
4214
4216
4215 Read a line of output from the server. If there are multiple output
4217 Read a line of output from the server. If there are multiple output
4216 pipes, reads only the main pipe.
4218 pipes, reads only the main pipe.
4217
4219
4218 ereadline
4220 ereadline
4219 ---------
4221 ---------
4220
4222
4221 Like ``readline``, but read from the stderr pipe, if available.
4223 Like ``readline``, but read from the stderr pipe, if available.
4222
4224
4223 read <X>
4225 read <X>
4224 --------
4226 --------
4225
4227
4226 ``read()`` N bytes from the server's main output pipe.
4228 ``read()`` N bytes from the server's main output pipe.
4227
4229
4228 eread <X>
4230 eread <X>
4229 ---------
4231 ---------
4230
4232
4231 ``read()`` N bytes from the server's stderr pipe, if available.
4233 ``read()`` N bytes from the server's stderr pipe, if available.
4232
4234
4233 Specifying Unified Frame-Based Protocol Frames
4235 Specifying Unified Frame-Based Protocol Frames
4234 ----------------------------------------------
4236 ----------------------------------------------
4235
4237
4236 It is possible to emit a *Unified Frame-Based Protocol* by using special
4238 It is possible to emit a *Unified Frame-Based Protocol* by using special
4237 syntax.
4239 syntax.
4238
4240
4239 A frame is composed as a type, flags, and payload. These can be parsed
4241 A frame is composed as a type, flags, and payload. These can be parsed
4240 from a string of the form:
4242 from a string of the form:
4241
4243
4242 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4244 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4243
4245
4244 ``request-id`` and ``stream-id`` are integers defining the request and
4246 ``request-id`` and ``stream-id`` are integers defining the request and
4245 stream identifiers.
4247 stream identifiers.
4246
4248
4247 ``type`` can be an integer value for the frame type or the string name
4249 ``type`` can be an integer value for the frame type or the string name
4248 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4250 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4249 ``command-name``.
4251 ``command-name``.
4250
4252
4251 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4253 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4252 components. Each component (and there can be just one) can be an integer
4254 components. Each component (and there can be just one) can be an integer
4253 or a flag name for stream flags or frame flags, respectively. Values are
4255 or a flag name for stream flags or frame flags, respectively. Values are
4254 resolved to integers and then bitwise OR'd together.
4256 resolved to integers and then bitwise OR'd together.
4255
4257
4256 ``payload`` represents the raw frame payload. If it begins with
4258 ``payload`` represents the raw frame payload. If it begins with
4257 ``cbor:``, the following string is evaluated as Python code and the
4259 ``cbor:``, the following string is evaluated as Python code and the
4258 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4260 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4259 as a Python byte string literal.
4261 as a Python byte string literal.
4260 """
4262 """
4261 opts = pycompat.byteskwargs(opts)
4263 opts = pycompat.byteskwargs(opts)
4262
4264
4263 if opts[b'localssh'] and not repo:
4265 if opts[b'localssh'] and not repo:
4264 raise error.Abort(_(b'--localssh requires a repository'))
4266 raise error.Abort(_(b'--localssh requires a repository'))
4265
4267
4266 if opts[b'peer'] and opts[b'peer'] not in (
4268 if opts[b'peer'] and opts[b'peer'] not in (
4267 b'raw',
4269 b'raw',
4268 b'http2',
4270 b'http2',
4269 b'ssh1',
4271 b'ssh1',
4270 b'ssh2',
4272 b'ssh2',
4271 ):
4273 ):
4272 raise error.Abort(
4274 raise error.Abort(
4273 _(b'invalid value for --peer'),
4275 _(b'invalid value for --peer'),
4274 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4276 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4275 )
4277 )
4276
4278
4277 if path and opts[b'localssh']:
4279 if path and opts[b'localssh']:
4278 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4280 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4279
4281
4280 if ui.interactive():
4282 if ui.interactive():
4281 ui.write(_(b'(waiting for commands on stdin)\n'))
4283 ui.write(_(b'(waiting for commands on stdin)\n'))
4282
4284
4283 blocks = list(_parsewirelangblocks(ui.fin))
4285 blocks = list(_parsewirelangblocks(ui.fin))
4284
4286
4285 proc = None
4287 proc = None
4286 stdin = None
4288 stdin = None
4287 stdout = None
4289 stdout = None
4288 stderr = None
4290 stderr = None
4289 opener = None
4291 opener = None
4290
4292
4291 if opts[b'localssh']:
4293 if opts[b'localssh']:
4292 # We start the SSH server in its own process so there is process
4294 # We start the SSH server in its own process so there is process
4293 # separation. This prevents a whole class of potential bugs around
4295 # separation. This prevents a whole class of potential bugs around
4294 # shared state from interfering with server operation.
4296 # shared state from interfering with server operation.
4295 args = procutil.hgcmd() + [
4297 args = procutil.hgcmd() + [
4296 b'-R',
4298 b'-R',
4297 repo.root,
4299 repo.root,
4298 b'debugserve',
4300 b'debugserve',
4299 b'--sshstdio',
4301 b'--sshstdio',
4300 ]
4302 ]
4301 proc = subprocess.Popen(
4303 proc = subprocess.Popen(
4302 pycompat.rapply(procutil.tonativestr, args),
4304 pycompat.rapply(procutil.tonativestr, args),
4303 stdin=subprocess.PIPE,
4305 stdin=subprocess.PIPE,
4304 stdout=subprocess.PIPE,
4306 stdout=subprocess.PIPE,
4305 stderr=subprocess.PIPE,
4307 stderr=subprocess.PIPE,
4306 bufsize=0,
4308 bufsize=0,
4307 )
4309 )
4308
4310
4309 stdin = proc.stdin
4311 stdin = proc.stdin
4310 stdout = proc.stdout
4312 stdout = proc.stdout
4311 stderr = proc.stderr
4313 stderr = proc.stderr
4312
4314
4313 # We turn the pipes into observers so we can log I/O.
4315 # We turn the pipes into observers so we can log I/O.
4314 if ui.verbose or opts[b'peer'] == b'raw':
4316 if ui.verbose or opts[b'peer'] == b'raw':
4315 stdin = util.makeloggingfileobject(
4317 stdin = util.makeloggingfileobject(
4316 ui, proc.stdin, b'i', logdata=True
4318 ui, proc.stdin, b'i', logdata=True
4317 )
4319 )
4318 stdout = util.makeloggingfileobject(
4320 stdout = util.makeloggingfileobject(
4319 ui, proc.stdout, b'o', logdata=True
4321 ui, proc.stdout, b'o', logdata=True
4320 )
4322 )
4321 stderr = util.makeloggingfileobject(
4323 stderr = util.makeloggingfileobject(
4322 ui, proc.stderr, b'e', logdata=True
4324 ui, proc.stderr, b'e', logdata=True
4323 )
4325 )
4324
4326
4325 # --localssh also implies the peer connection settings.
4327 # --localssh also implies the peer connection settings.
4326
4328
4327 url = b'ssh://localserver'
4329 url = b'ssh://localserver'
4328 autoreadstderr = not opts[b'noreadstderr']
4330 autoreadstderr = not opts[b'noreadstderr']
4329
4331
4330 if opts[b'peer'] == b'ssh1':
4332 if opts[b'peer'] == b'ssh1':
4331 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4333 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4332 peer = sshpeer.sshv1peer(
4334 peer = sshpeer.sshv1peer(
4333 ui,
4335 ui,
4334 url,
4336 url,
4335 proc,
4337 proc,
4336 stdin,
4338 stdin,
4337 stdout,
4339 stdout,
4338 stderr,
4340 stderr,
4339 None,
4341 None,
4340 autoreadstderr=autoreadstderr,
4342 autoreadstderr=autoreadstderr,
4341 )
4343 )
4342 elif opts[b'peer'] == b'ssh2':
4344 elif opts[b'peer'] == b'ssh2':
4343 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4345 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4344 peer = sshpeer.sshv2peer(
4346 peer = sshpeer.sshv2peer(
4345 ui,
4347 ui,
4346 url,
4348 url,
4347 proc,
4349 proc,
4348 stdin,
4350 stdin,
4349 stdout,
4351 stdout,
4350 stderr,
4352 stderr,
4351 None,
4353 None,
4352 autoreadstderr=autoreadstderr,
4354 autoreadstderr=autoreadstderr,
4353 )
4355 )
4354 elif opts[b'peer'] == b'raw':
4356 elif opts[b'peer'] == b'raw':
4355 ui.write(_(b'using raw connection to peer\n'))
4357 ui.write(_(b'using raw connection to peer\n'))
4356 peer = None
4358 peer = None
4357 else:
4359 else:
4358 ui.write(_(b'creating ssh peer from handshake results\n'))
4360 ui.write(_(b'creating ssh peer from handshake results\n'))
4359 peer = sshpeer.makepeer(
4361 peer = sshpeer.makepeer(
4360 ui,
4362 ui,
4361 url,
4363 url,
4362 proc,
4364 proc,
4363 stdin,
4365 stdin,
4364 stdout,
4366 stdout,
4365 stderr,
4367 stderr,
4366 autoreadstderr=autoreadstderr,
4368 autoreadstderr=autoreadstderr,
4367 )
4369 )
4368
4370
4369 elif path:
4371 elif path:
4370 # We bypass hg.peer() so we can proxy the sockets.
4372 # We bypass hg.peer() so we can proxy the sockets.
4371 # TODO consider not doing this because we skip
4373 # TODO consider not doing this because we skip
4372 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4374 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4373 u = util.url(path)
4375 u = util.url(path)
4374 if u.scheme != b'http':
4376 if u.scheme != b'http':
4375 raise error.Abort(_(b'only http:// paths are currently supported'))
4377 raise error.Abort(_(b'only http:// paths are currently supported'))
4376
4378
4377 url, authinfo = u.authinfo()
4379 url, authinfo = u.authinfo()
4378 openerargs = {
4380 openerargs = {
4379 'useragent': b'Mercurial debugwireproto',
4381 'useragent': b'Mercurial debugwireproto',
4380 }
4382 }
4381
4383
4382 # Turn pipes/sockets into observers so we can log I/O.
4384 # Turn pipes/sockets into observers so we can log I/O.
4383 if ui.verbose:
4385 if ui.verbose:
4384 openerargs.update(
4386 openerargs.update(
4385 {
4387 {
4386 'loggingfh': ui,
4388 'loggingfh': ui,
4387 'loggingname': b's',
4389 'loggingname': b's',
4388 'loggingopts': {
4390 'loggingopts': {
4389 'logdata': True,
4391 'logdata': True,
4390 'logdataapis': False,
4392 'logdataapis': False,
4391 },
4393 },
4392 }
4394 }
4393 )
4395 )
4394
4396
4395 if ui.debugflag:
4397 if ui.debugflag:
4396 openerargs['loggingopts']['logdataapis'] = True
4398 openerargs['loggingopts']['logdataapis'] = True
4397
4399
4398 # Don't send default headers when in raw mode. This allows us to
4400 # Don't send default headers when in raw mode. This allows us to
4399 # bypass most of the behavior of our URL handling code so we can
4401 # bypass most of the behavior of our URL handling code so we can
4400 # have near complete control over what's sent on the wire.
4402 # have near complete control over what's sent on the wire.
4401 if opts[b'peer'] == b'raw':
4403 if opts[b'peer'] == b'raw':
4402 openerargs['sendaccept'] = False
4404 openerargs['sendaccept'] = False
4403
4405
4404 opener = urlmod.opener(ui, authinfo, **openerargs)
4406 opener = urlmod.opener(ui, authinfo, **openerargs)
4405
4407
4406 if opts[b'peer'] == b'http2':
4408 if opts[b'peer'] == b'http2':
4407 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4409 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4408 # We go through makepeer() because we need an API descriptor for
4410 # We go through makepeer() because we need an API descriptor for
4409 # the peer instance to be useful.
4411 # the peer instance to be useful.
4410 with ui.configoverride(
4412 with ui.configoverride(
4411 {(b'experimental', b'httppeer.advertise-v2'): True}
4413 {(b'experimental', b'httppeer.advertise-v2'): True}
4412 ):
4414 ):
4413 if opts[b'nologhandshake']:
4415 if opts[b'nologhandshake']:
4414 ui.pushbuffer()
4416 ui.pushbuffer()
4415
4417
4416 peer = httppeer.makepeer(ui, path, opener=opener)
4418 peer = httppeer.makepeer(ui, path, opener=opener)
4417
4419
4418 if opts[b'nologhandshake']:
4420 if opts[b'nologhandshake']:
4419 ui.popbuffer()
4421 ui.popbuffer()
4420
4422
4421 if not isinstance(peer, httppeer.httpv2peer):
4423 if not isinstance(peer, httppeer.httpv2peer):
4422 raise error.Abort(
4424 raise error.Abort(
4423 _(
4425 _(
4424 b'could not instantiate HTTP peer for '
4426 b'could not instantiate HTTP peer for '
4425 b'wire protocol version 2'
4427 b'wire protocol version 2'
4426 ),
4428 ),
4427 hint=_(
4429 hint=_(
4428 b'the server may not have the feature '
4430 b'the server may not have the feature '
4429 b'enabled or is not allowing this '
4431 b'enabled or is not allowing this '
4430 b'client version'
4432 b'client version'
4431 ),
4433 ),
4432 )
4434 )
4433
4435
4434 elif opts[b'peer'] == b'raw':
4436 elif opts[b'peer'] == b'raw':
4435 ui.write(_(b'using raw connection to peer\n'))
4437 ui.write(_(b'using raw connection to peer\n'))
4436 peer = None
4438 peer = None
4437 elif opts[b'peer']:
4439 elif opts[b'peer']:
4438 raise error.Abort(
4440 raise error.Abort(
4439 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4441 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4440 )
4442 )
4441 else:
4443 else:
4442 peer = httppeer.makepeer(ui, path, opener=opener)
4444 peer = httppeer.makepeer(ui, path, opener=opener)
4443
4445
4444 # We /could/ populate stdin/stdout with sock.makefile()...
4446 # We /could/ populate stdin/stdout with sock.makefile()...
4445 else:
4447 else:
4446 raise error.Abort(_(b'unsupported connection configuration'))
4448 raise error.Abort(_(b'unsupported connection configuration'))
4447
4449
4448 batchedcommands = None
4450 batchedcommands = None
4449
4451
4450 # Now perform actions based on the parsed wire language instructions.
4452 # Now perform actions based on the parsed wire language instructions.
4451 for action, lines in blocks:
4453 for action, lines in blocks:
4452 if action in (b'raw', b'raw+'):
4454 if action in (b'raw', b'raw+'):
4453 if not stdin:
4455 if not stdin:
4454 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4456 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4455
4457
4456 # Concatenate the data together.
4458 # Concatenate the data together.
4457 data = b''.join(l.lstrip() for l in lines)
4459 data = b''.join(l.lstrip() for l in lines)
4458 data = stringutil.unescapestr(data)
4460 data = stringutil.unescapestr(data)
4459 stdin.write(data)
4461 stdin.write(data)
4460
4462
4461 if action == b'raw+':
4463 if action == b'raw+':
4462 stdin.flush()
4464 stdin.flush()
4463 elif action == b'flush':
4465 elif action == b'flush':
4464 if not stdin:
4466 if not stdin:
4465 raise error.Abort(_(b'cannot call flush on this peer'))
4467 raise error.Abort(_(b'cannot call flush on this peer'))
4466 stdin.flush()
4468 stdin.flush()
4467 elif action.startswith(b'command'):
4469 elif action.startswith(b'command'):
4468 if not peer:
4470 if not peer:
4469 raise error.Abort(
4471 raise error.Abort(
4470 _(
4472 _(
4471 b'cannot send commands unless peer instance '
4473 b'cannot send commands unless peer instance '
4472 b'is available'
4474 b'is available'
4473 )
4475 )
4474 )
4476 )
4475
4477
4476 command = action.split(b' ', 1)[1]
4478 command = action.split(b' ', 1)[1]
4477
4479
4478 args = {}
4480 args = {}
4479 for line in lines:
4481 for line in lines:
4480 # We need to allow empty values.
4482 # We need to allow empty values.
4481 fields = line.lstrip().split(b' ', 1)
4483 fields = line.lstrip().split(b' ', 1)
4482 if len(fields) == 1:
4484 if len(fields) == 1:
4483 key = fields[0]
4485 key = fields[0]
4484 value = b''
4486 value = b''
4485 else:
4487 else:
4486 key, value = fields
4488 key, value = fields
4487
4489
4488 if value.startswith(b'eval:'):
4490 if value.startswith(b'eval:'):
4489 value = stringutil.evalpythonliteral(value[5:])
4491 value = stringutil.evalpythonliteral(value[5:])
4490 else:
4492 else:
4491 value = stringutil.unescapestr(value)
4493 value = stringutil.unescapestr(value)
4492
4494
4493 args[key] = value
4495 args[key] = value
4494
4496
4495 if batchedcommands is not None:
4497 if batchedcommands is not None:
4496 batchedcommands.append((command, args))
4498 batchedcommands.append((command, args))
4497 continue
4499 continue
4498
4500
4499 ui.status(_(b'sending %s command\n') % command)
4501 ui.status(_(b'sending %s command\n') % command)
4500
4502
4501 if b'PUSHFILE' in args:
4503 if b'PUSHFILE' in args:
4502 with open(args[b'PUSHFILE'], 'rb') as fh:
4504 with open(args[b'PUSHFILE'], 'rb') as fh:
4503 del args[b'PUSHFILE']
4505 del args[b'PUSHFILE']
4504 res, output = peer._callpush(
4506 res, output = peer._callpush(
4505 command, fh, **pycompat.strkwargs(args)
4507 command, fh, **pycompat.strkwargs(args)
4506 )
4508 )
4507 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4509 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4508 ui.status(
4510 ui.status(
4509 _(b'remote output: %s\n') % stringutil.escapestr(output)
4511 _(b'remote output: %s\n') % stringutil.escapestr(output)
4510 )
4512 )
4511 else:
4513 else:
4512 with peer.commandexecutor() as e:
4514 with peer.commandexecutor() as e:
4513 res = e.callcommand(command, args).result()
4515 res = e.callcommand(command, args).result()
4514
4516
4515 if isinstance(res, wireprotov2peer.commandresponse):
4517 if isinstance(res, wireprotov2peer.commandresponse):
4516 val = res.objects()
4518 val = res.objects()
4517 ui.status(
4519 ui.status(
4518 _(b'response: %s\n')
4520 _(b'response: %s\n')
4519 % stringutil.pprint(val, bprefix=True, indent=2)
4521 % stringutil.pprint(val, bprefix=True, indent=2)
4520 )
4522 )
4521 else:
4523 else:
4522 ui.status(
4524 ui.status(
4523 _(b'response: %s\n')
4525 _(b'response: %s\n')
4524 % stringutil.pprint(res, bprefix=True, indent=2)
4526 % stringutil.pprint(res, bprefix=True, indent=2)
4525 )
4527 )
4526
4528
4527 elif action == b'batchbegin':
4529 elif action == b'batchbegin':
4528 if batchedcommands is not None:
4530 if batchedcommands is not None:
4529 raise error.Abort(_(b'nested batchbegin not allowed'))
4531 raise error.Abort(_(b'nested batchbegin not allowed'))
4530
4532
4531 batchedcommands = []
4533 batchedcommands = []
4532 elif action == b'batchsubmit':
4534 elif action == b'batchsubmit':
4533 # There is a batching API we could go through. But it would be
4535 # There is a batching API we could go through. But it would be
4534 # difficult to normalize requests into function calls. It is easier
4536 # difficult to normalize requests into function calls. It is easier
4535 # to bypass this layer and normalize to commands + args.
4537 # to bypass this layer and normalize to commands + args.
4536 ui.status(
4538 ui.status(
4537 _(b'sending batch with %d sub-commands\n')
4539 _(b'sending batch with %d sub-commands\n')
4538 % len(batchedcommands)
4540 % len(batchedcommands)
4539 )
4541 )
4540 assert peer is not None
4542 assert peer is not None
4541 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4543 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4542 ui.status(
4544 ui.status(
4543 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4545 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4544 )
4546 )
4545
4547
4546 batchedcommands = None
4548 batchedcommands = None
4547
4549
4548 elif action.startswith(b'httprequest '):
4550 elif action.startswith(b'httprequest '):
4549 if not opener:
4551 if not opener:
4550 raise error.Abort(
4552 raise error.Abort(
4551 _(b'cannot use httprequest without an HTTP peer')
4553 _(b'cannot use httprequest without an HTTP peer')
4552 )
4554 )
4553
4555
4554 request = action.split(b' ', 2)
4556 request = action.split(b' ', 2)
4555 if len(request) != 3:
4557 if len(request) != 3:
4556 raise error.Abort(
4558 raise error.Abort(
4557 _(
4559 _(
4558 b'invalid httprequest: expected format is '
4560 b'invalid httprequest: expected format is '
4559 b'"httprequest <method> <path>'
4561 b'"httprequest <method> <path>'
4560 )
4562 )
4561 )
4563 )
4562
4564
4563 method, httppath = request[1:]
4565 method, httppath = request[1:]
4564 headers = {}
4566 headers = {}
4565 body = None
4567 body = None
4566 frames = []
4568 frames = []
4567 for line in lines:
4569 for line in lines:
4568 line = line.lstrip()
4570 line = line.lstrip()
4569 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4571 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4570 if m:
4572 if m:
4571 # Headers need to use native strings.
4573 # Headers need to use native strings.
4572 key = pycompat.strurl(m.group(1))
4574 key = pycompat.strurl(m.group(1))
4573 value = pycompat.strurl(m.group(2))
4575 value = pycompat.strurl(m.group(2))
4574 headers[key] = value
4576 headers[key] = value
4575 continue
4577 continue
4576
4578
4577 if line.startswith(b'BODYFILE '):
4579 if line.startswith(b'BODYFILE '):
4578 with open(line.split(b' ', 1), b'rb') as fh:
4580 with open(line.split(b' ', 1), b'rb') as fh:
4579 body = fh.read()
4581 body = fh.read()
4580 elif line.startswith(b'frame '):
4582 elif line.startswith(b'frame '):
4581 frame = wireprotoframing.makeframefromhumanstring(
4583 frame = wireprotoframing.makeframefromhumanstring(
4582 line[len(b'frame ') :]
4584 line[len(b'frame ') :]
4583 )
4585 )
4584
4586
4585 frames.append(frame)
4587 frames.append(frame)
4586 else:
4588 else:
4587 raise error.Abort(
4589 raise error.Abort(
4588 _(b'unknown argument to httprequest: %s') % line
4590 _(b'unknown argument to httprequest: %s') % line
4589 )
4591 )
4590
4592
4591 url = path + httppath
4593 url = path + httppath
4592
4594
4593 if frames:
4595 if frames:
4594 body = b''.join(bytes(f) for f in frames)
4596 body = b''.join(bytes(f) for f in frames)
4595
4597
4596 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4598 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4597
4599
4598 # urllib.Request insists on using has_data() as a proxy for
4600 # urllib.Request insists on using has_data() as a proxy for
4599 # determining the request method. Override that to use our
4601 # determining the request method. Override that to use our
4600 # explicitly requested method.
4602 # explicitly requested method.
4601 req.get_method = lambda: pycompat.sysstr(method)
4603 req.get_method = lambda: pycompat.sysstr(method)
4602
4604
4603 try:
4605 try:
4604 res = opener.open(req)
4606 res = opener.open(req)
4605 body = res.read()
4607 body = res.read()
4606 except util.urlerr.urlerror as e:
4608 except util.urlerr.urlerror as e:
4607 # read() method must be called, but only exists in Python 2
4609 # read() method must be called, but only exists in Python 2
4608 getattr(e, 'read', lambda: None)()
4610 getattr(e, 'read', lambda: None)()
4609 continue
4611 continue
4610
4612
4611 ct = res.headers.get('Content-Type')
4613 ct = res.headers.get('Content-Type')
4612 if ct == 'application/mercurial-cbor':
4614 if ct == 'application/mercurial-cbor':
4613 ui.write(
4615 ui.write(
4614 _(b'cbor> %s\n')
4616 _(b'cbor> %s\n')
4615 % stringutil.pprint(
4617 % stringutil.pprint(
4616 cborutil.decodeall(body), bprefix=True, indent=2
4618 cborutil.decodeall(body), bprefix=True, indent=2
4617 )
4619 )
4618 )
4620 )
4619
4621
4620 elif action == b'close':
4622 elif action == b'close':
4621 assert peer is not None
4623 assert peer is not None
4622 peer.close()
4624 peer.close()
4623 elif action == b'readavailable':
4625 elif action == b'readavailable':
4624 if not stdout or not stderr:
4626 if not stdout or not stderr:
4625 raise error.Abort(
4627 raise error.Abort(
4626 _(b'readavailable not available on this peer')
4628 _(b'readavailable not available on this peer')
4627 )
4629 )
4628
4630
4629 stdin.close()
4631 stdin.close()
4630 stdout.read()
4632 stdout.read()
4631 stderr.read()
4633 stderr.read()
4632
4634
4633 elif action == b'readline':
4635 elif action == b'readline':
4634 if not stdout:
4636 if not stdout:
4635 raise error.Abort(_(b'readline not available on this peer'))
4637 raise error.Abort(_(b'readline not available on this peer'))
4636 stdout.readline()
4638 stdout.readline()
4637 elif action == b'ereadline':
4639 elif action == b'ereadline':
4638 if not stderr:
4640 if not stderr:
4639 raise error.Abort(_(b'ereadline not available on this peer'))
4641 raise error.Abort(_(b'ereadline not available on this peer'))
4640 stderr.readline()
4642 stderr.readline()
4641 elif action.startswith(b'read '):
4643 elif action.startswith(b'read '):
4642 count = int(action.split(b' ', 1)[1])
4644 count = int(action.split(b' ', 1)[1])
4643 if not stdout:
4645 if not stdout:
4644 raise error.Abort(_(b'read not available on this peer'))
4646 raise error.Abort(_(b'read not available on this peer'))
4645 stdout.read(count)
4647 stdout.read(count)
4646 elif action.startswith(b'eread '):
4648 elif action.startswith(b'eread '):
4647 count = int(action.split(b' ', 1)[1])
4649 count = int(action.split(b' ', 1)[1])
4648 if not stderr:
4650 if not stderr:
4649 raise error.Abort(_(b'eread not available on this peer'))
4651 raise error.Abort(_(b'eread not available on this peer'))
4650 stderr.read(count)
4652 stderr.read(count)
4651 else:
4653 else:
4652 raise error.Abort(_(b'unknown action: %s') % action)
4654 raise error.Abort(_(b'unknown action: %s') % action)
4653
4655
4654 if batchedcommands is not None:
4656 if batchedcommands is not None:
4655 raise error.Abort(_(b'unclosed "batchbegin" request'))
4657 raise error.Abort(_(b'unclosed "batchbegin" request'))
4656
4658
4657 if peer:
4659 if peer:
4658 peer.close()
4660 peer.close()
4659
4661
4660 if proc:
4662 if proc:
4661 proc.kill()
4663 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now