debugcommands: s/stdin/stdout in debugnodemap help...
Pulkit Goyal
r47193:3e3b81b6 default
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import glob
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    bundlerepo,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    mergestate as mergestatemod,
    metadata,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    strip,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
    sidedata,
)

release = lockmod.release

table = {}
table.update(strip.command._table)
command = registrar.command(table)


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """
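    # Illustrative example (not taken from the source): the DAG text
    #     "+2 :base *base +3 /base"
    # would build two linear changesets, tag the second one "base", start a
    # fork from that tag, extend the fork by three more changesets, and
    # finally merge the fork's tip back with "base".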

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                if len(ps) > 1:
                    if not p2:
                        p2 = repo[ps[1]]
                    for fn in p2:
                        if fn.startswith(b"nf"):
                            files.append(fn)
                            filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
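    # Illustrative invocations (assumed, not part of the source):
    #   hg debugbundle --spec bundle.hg   -> print only the bundlespec
    #   hg debugbundle --all bundle.hg    -> include per-delta details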
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b' %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)


@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is not None:
        files = metadata.decode_files_sidedata(sd)
        for f in sorted(files.touched):
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)


@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
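    # Note (assumption, not in the original source): the dirstate state
    # letters checked below are 'n' (normal), 'a' (added), 'r' (removed)
    # and 'm' (merged).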
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
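    # Illustrative usage (assumed, not from the source): generate a bundle
    # with `hg debugcreatestreamclonebundle stream.hg` and load it into an
    # empty repository with `hg debugapplystreamclonebundle stream.hg`, the
    # companion command defined earlier in this file.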
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
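    # Illustrative example (assumed invocation): `hg debugdag -t -b` emits the
    # changelog DAG of the current repository, labelling revisions with their
    # tags and annotating branch switches, using the same DAG text elements
    # that `hg debugbuilddag` accepts.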
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
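    # Illustrative example (assumed invocation): `hg debugdata -c 0` dumps the
    # raw changelog entry for revision 0; `hg debugdata FILE REV` does the
    # same for a filelog revision.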
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
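    # Illustrative example (assumed invocation): `hg debugdate '2006-02-01'`
    # prints the parsed date both as the internal (unixtime, offset) pair and
    # in the standard human-readable form.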
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                  (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                  base of delta chain to end of this revision; a measurement
                  of how much extra data we need to read/seek across to read
                  the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                  how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
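    # Illustrative example (assumed invocation):
    #   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
    # would list, for every manifest revision, its delta chain id, the chain
    # length and how the delta was computed, using the template keywords
    # documented above.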
769 opts = pycompat.byteskwargs(opts)
769 opts = pycompat.byteskwargs(opts)
770 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
770 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
771 index = r.index
771 index = r.index
772 start = r.start
772 start = r.start
773 length = r.length
773 length = r.length
774 generaldelta = r.version & revlog.FLAG_GENERALDELTA
774 generaldelta = r.version & revlog.FLAG_GENERALDELTA
775 withsparseread = getattr(r, '_withsparseread', False)
775 withsparseread = getattr(r, '_withsparseread', False)
776
776
777 def revinfo(rev):
777 def revinfo(rev):
778 e = index[rev]
778 e = index[rev]
779 compsize = e[1]
779 compsize = e[1]
780 uncompsize = e[2]
780 uncompsize = e[2]
781 chainsize = 0
781 chainsize = 0
782
782
783 if generaldelta:
783 if generaldelta:
784 if e[3] == e[5]:
784 if e[3] == e[5]:
785 deltatype = b'p1'
785 deltatype = b'p1'
786 elif e[3] == e[6]:
786 elif e[3] == e[6]:
787 deltatype = b'p2'
787 deltatype = b'p2'
788 elif e[3] == rev - 1:
788 elif e[3] == rev - 1:
789 deltatype = b'prev'
789 deltatype = b'prev'
790 elif e[3] == rev:
790 elif e[3] == rev:
791 deltatype = b'base'
791 deltatype = b'base'
792 else:
792 else:
793 deltatype = b'other'
793 deltatype = b'other'
794 else:
794 else:
795 if e[3] == rev:
795 if e[3] == rev:
796 deltatype = b'base'
796 deltatype = b'base'
797 else:
797 else:
798 deltatype = b'prev'
798 deltatype = b'prev'
799
799
800 chain = r._deltachain(rev)[0]
800 chain = r._deltachain(rev)[0]
801 for iterrev in chain:
801 for iterrev in chain:
802 e = index[iterrev]
802 e = index[iterrev]
803 chainsize += e[1]
803 chainsize += e[1]
804
804
805 return compsize, uncompsize, deltatype, chain, chainsize
805 return compsize, uncompsize, deltatype, chain, chainsize
806
806
807 fm = ui.formatter(b'debugdeltachain', opts)
807 fm = ui.formatter(b'debugdeltachain', opts)
808
808
809 fm.plain(
809 fm.plain(
810 b' rev chain# chainlen prev delta '
810 b' rev chain# chainlen prev delta '
811 b'size rawsize chainsize ratio lindist extradist '
811 b'size rawsize chainsize ratio lindist extradist '
812 b'extraratio'
812 b'extraratio'
813 )
813 )
814 if withsparseread:
814 if withsparseread:
815 fm.plain(b' readsize largestblk rddensity srchunks')
815 fm.plain(b' readsize largestblk rddensity srchunks')
816 fm.plain(b'\n')
816 fm.plain(b'\n')
817
817
818 chainbases = {}
818 chainbases = {}
819 for rev in r:
819 for rev in r:
820 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
820 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
821 chainbase = chain[0]
821 chainbase = chain[0]
822 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
822 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
823 basestart = start(chainbase)
823 basestart = start(chainbase)
824 revstart = start(rev)
824 revstart = start(rev)
825 lineardist = revstart + comp - basestart
825 lineardist = revstart + comp - basestart
826 extradist = lineardist - chainsize
826 extradist = lineardist - chainsize
827 try:
827 try:
828 prevrev = chain[-2]
828 prevrev = chain[-2]
829 except IndexError:
829 except IndexError:
830 prevrev = -1
830 prevrev = -1
831
831
832 if uncomp != 0:
832 if uncomp != 0:
833 chainratio = float(chainsize) / float(uncomp)
833 chainratio = float(chainsize) / float(uncomp)
834 else:
834 else:
835 chainratio = chainsize
835 chainratio = chainsize
836
836
837 if chainsize != 0:
837 if chainsize != 0:
838 extraratio = float(extradist) / float(chainsize)
838 extraratio = float(extradist) / float(chainsize)
839 else:
839 else:
840 extraratio = extradist
840 extraratio = extradist
841
841
842 fm.startitem()
842 fm.startitem()
843 fm.write(
843 fm.write(
844 b'rev chainid chainlen prevrev deltatype compsize '
844 b'rev chainid chainlen prevrev deltatype compsize '
845 b'uncompsize chainsize chainratio lindist extradist '
845 b'uncompsize chainsize chainratio lindist extradist '
846 b'extraratio',
846 b'extraratio',
847 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
847 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
848 rev,
848 rev,
849 chainid,
849 chainid,
850 len(chain),
850 len(chain),
851 prevrev,
851 prevrev,
852 deltatype,
852 deltatype,
853 comp,
853 comp,
854 uncomp,
854 uncomp,
855 chainsize,
855 chainsize,
856 chainratio,
856 chainratio,
857 lineardist,
857 lineardist,
858 extradist,
858 extradist,
859 extraratio,
859 extraratio,
860 rev=rev,
860 rev=rev,
861 chainid=chainid,
861 chainid=chainid,
862 chainlen=len(chain),
862 chainlen=len(chain),
863 prevrev=prevrev,
863 prevrev=prevrev,
864 deltatype=deltatype,
864 deltatype=deltatype,
865 compsize=comp,
865 compsize=comp,
866 uncompsize=uncomp,
866 uncompsize=uncomp,
867 chainsize=chainsize,
867 chainsize=chainsize,
868 chainratio=chainratio,
868 chainratio=chainratio,
869 lindist=lineardist,
869 lindist=lineardist,
870 extradist=extradist,
870 extradist=extradist,
871 extraratio=extraratio,
871 extraratio=extraratio,
872 )
872 )
873 if withsparseread:
873 if withsparseread:
874 readsize = 0
874 readsize = 0
875 largestblock = 0
875 largestblock = 0
876 srchunks = 0
876 srchunks = 0
877
877
878 for revschunk in deltautil.slicechunk(r, chain):
878 for revschunk in deltautil.slicechunk(r, chain):
879 srchunks += 1
879 srchunks += 1
880 blkend = start(revschunk[-1]) + length(revschunk[-1])
880 blkend = start(revschunk[-1]) + length(revschunk[-1])
881 blksize = blkend - start(revschunk[0])
881 blksize = blkend - start(revschunk[0])
882
882
883 readsize += blksize
883 readsize += blksize
884 if largestblock < blksize:
884 if largestblock < blksize:
885 largestblock = blksize
885 largestblock = blksize
886
886
887 if readsize:
887 if readsize:
888 readdensity = float(chainsize) / float(readsize)
888 readdensity = float(chainsize) / float(readsize)
889 else:
889 else:
890 readdensity = 1
890 readdensity = 1
891
891
892 fm.write(
892 fm.write(
893 b'readsize largestblock readdensity srchunks',
893 b'readsize largestblock readdensity srchunks',
894 b' %10d %10d %9.5f %8d',
894 b' %10d %10d %9.5f %8d',
895 readsize,
895 readsize,
896 largestblock,
896 largestblock,
897 readdensity,
897 readdensity,
898 srchunks,
898 srchunks,
899 readsize=readsize,
899 readsize=readsize,
900 largestblock=largestblock,
900 largestblock=largestblock,
901 readdensity=readdensity,
901 readdensity=readdensity,
902 srchunks=srchunks,
902 srchunks=srchunks,
903 )
903 )
904
904
905 fm.plain(b'\n')
905 fm.plain(b'\n')
906
906
907 fm.end()
907 fm.end()
908
908
909
909
910 @command(
910 @command(
911 b'debugdirstate|debugstate',
911 b'debugdirstate|debugstate',
912 [
912 [
913 (
913 (
914 b'',
914 b'',
915 b'nodates',
915 b'nodates',
916 None,
916 None,
917 _(b'do not display the saved mtime (DEPRECATED)'),
917 _(b'do not display the saved mtime (DEPRECATED)'),
918 ),
918 ),
919 (b'', b'dates', True, _(b'display the saved mtime')),
919 (b'', b'dates', True, _(b'display the saved mtime')),
920 (b'', b'datesort', None, _(b'sort by saved mtime')),
920 (b'', b'datesort', None, _(b'sort by saved mtime')),
921 ],
921 ],
922 _(b'[OPTION]...'),
922 _(b'[OPTION]...'),
923 )
923 )
924 def debugstate(ui, repo, **opts):
924 def debugstate(ui, repo, **opts):
925 """show the contents of the current dirstate"""
925 """show the contents of the current dirstate"""
926
926
927 nodates = not opts['dates']
927 nodates = not opts['dates']
928 if opts.get('nodates') is not None:
928 if opts.get('nodates') is not None:
929 nodates = True
929 nodates = True
930 datesort = opts.get('datesort')
930 datesort = opts.get('datesort')
931
931
932 if datesort:
932 if datesort:
933 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
933 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
934 else:
934 else:
935 keyfunc = None # sort by filename
935 keyfunc = None # sort by filename
936 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
936 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
937 if ent[3] == -1:
937 if ent[3] == -1:
938 timestr = b'unset '
938 timestr = b'unset '
939 elif nodates:
939 elif nodates:
940 timestr = b'set '
940 timestr = b'set '
941 else:
941 else:
942 timestr = time.strftime(
942 timestr = time.strftime(
943 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
943 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
944 )
944 )
945 timestr = encoding.strtolocal(timestr)
945 timestr = encoding.strtolocal(timestr)
946 if ent[1] & 0o20000:
946 if ent[1] & 0o20000:
947 mode = b'lnk'
947 mode = b'lnk'
948 else:
948 else:
949 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
949 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
950 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
950 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
951 for f in repo.dirstate.copies():
951 for f in repo.dirstate.copies():
952 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
952 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953
953
954
954
955 @command(
955 @command(
956 b'debugdiscovery',
956 b'debugdiscovery',
957 [
957 [
958 (b'', b'old', None, _(b'use old-style discovery')),
958 (b'', b'old', None, _(b'use old-style discovery')),
959 (
959 (
960 b'',
960 b'',
961 b'nonheads',
961 b'nonheads',
962 None,
962 None,
963 _(b'use old-style discovery with non-heads included'),
963 _(b'use old-style discovery with non-heads included'),
964 ),
964 ),
965 (b'', b'rev', [], b'restrict discovery to this set of revs'),
965 (b'', b'rev', [], b'restrict discovery to this set of revs'),
966 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
966 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
967 ]
967 ]
968 + cmdutil.remoteopts,
968 + cmdutil.remoteopts,
969 _(b'[--rev REV] [OTHER]'),
969 _(b'[--rev REV] [OTHER]'),
970 )
970 )
971 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
971 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
972 """runs the changeset discovery protocol in isolation"""
972 """runs the changeset discovery protocol in isolation"""
973 opts = pycompat.byteskwargs(opts)
973 opts = pycompat.byteskwargs(opts)
974 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
974 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
975 remote = hg.peer(repo, opts, remoteurl)
975 remote = hg.peer(repo, opts, remoteurl)
976 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
976 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
977
977
978 # make sure tests are repeatable
978 # make sure tests are repeatable
979 random.seed(int(opts[b'seed']))
979 random.seed(int(opts[b'seed']))
980
980
981 data = {}
981 data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: there cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

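    # the "first undecided set": revisions that are neither ancestors of a
    # common head known remotely nor descendants of a common head that is also
    # a local head; roughly what discovery starts out undecided about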
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b" also local heads: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b" missing: %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b" common: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b" unknown: %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )


_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
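    # stream the resource in fixed-size chunks (_chunksize, 4 KiB) so large
    # downloads are never held in memory in full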
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()


@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

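        # oxidized builds (e.g. PyOxidizer) ship modules without __file__, so
        # report the executable itself as the extension's source location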
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()


@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

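    # a fileset expression goes through these successive transformation
    # stages; --show-stage can print the intermediate tree after any of them
    # (or after 'all' of them)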
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)


@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()


@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
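    # probe filesystem case sensitivity with a throwaway temporary file in the
    # target directory, reporting '(unknown)' if the probe fails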
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)


@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    bundletype = opts.get(b'type', b'bzip2').lower()
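    # map the user-facing --type names onto the internal bundle format
    # identifiers understood by bundle2.writebundle()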
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)


@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, shows the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))


@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

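    # measure the node id column width from the first revision (12 characters
    # for short hashes, 40 when --debug prints full hex node ids)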
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()


@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")


@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))


@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

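    # the module policy determines which compiled implementations (C and/or
    # Rust) should be importable; try importing them to confirm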
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems


@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))


@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)


@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

1996 held += report(repo.svfs, b"lock", repo.lock)
1996 held += report(repo.svfs, b"lock", repo.lock)
1997 held += report(repo.vfs, b"wlock", repo.wlock)
1997 held += report(repo.vfs, b"wlock", repo.wlock)
1998
1998
1999 return held
1999 return held
2000
2000
2001
2001
2002 @command(
2002 @command(
2003 b'debugmanifestfulltextcache',
2003 b'debugmanifestfulltextcache',
2004 [
2004 [
2005 (b'', b'clear', False, _(b'clear the cache')),
2005 (b'', b'clear', False, _(b'clear the cache')),
2006 (
2006 (
2007 b'a',
2007 b'a',
2008 b'add',
2008 b'add',
2009 [],
2009 [],
2010 _(b'add the given manifest nodes to the cache'),
2010 _(b'add the given manifest nodes to the cache'),
2011 _(b'NODE'),
2011 _(b'NODE'),
2012 ),
2012 ),
2013 ],
2013 ],
2014 b'',
2014 b'',
2015 )
2015 )
2016 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2016 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2017 """show, clear or amend the contents of the manifest fulltext cache"""
2017 """show, clear or amend the contents of the manifest fulltext cache"""
2018
2018
2019 def getcache():
2019 def getcache():
2020 r = repo.manifestlog.getstorage(b'')
2020 r = repo.manifestlog.getstorage(b'')
2021 try:
2021 try:
2022 return r._fulltextcache
2022 return r._fulltextcache
2023 except AttributeError:
2023 except AttributeError:
2024 msg = _(
2024 msg = _(
2025 b"Current revlog implementation doesn't appear to have a "
2025 b"Current revlog implementation doesn't appear to have a "
2026 b"manifest fulltext cache\n"
2026 b"manifest fulltext cache\n"
2027 )
2027 )
2028 raise error.Abort(msg)
2028 raise error.Abort(msg)
2029
2029
2030 if opts.get('clear'):
2030 if opts.get('clear'):
2031 with repo.wlock():
2031 with repo.wlock():
2032 cache = getcache()
2032 cache = getcache()
2033 cache.clear(clear_persisted_data=True)
2033 cache.clear(clear_persisted_data=True)
2034 return
2034 return
2035
2035
2036 if add:
2036 if add:
2037 with repo.wlock():
2037 with repo.wlock():
2038 m = repo.manifestlog
2038 m = repo.manifestlog
2039 store = m.getstorage(b'')
2039 store = m.getstorage(b'')
2040 for n in add:
2040 for n in add:
2041 try:
2041 try:
2042 manifest = m[store.lookup(n)]
2042 manifest = m[store.lookup(n)]
2043 except error.LookupError as e:
2043 except error.LookupError as e:
2044 raise error.Abort(e, hint=b"Check your manifest node id")
2044 raise error.Abort(e, hint=b"Check your manifest node id")
2045 manifest.read() # stores revision in cache too
2045 manifest.read() # stores revision in cache too
2046 return
2046 return
2047
2047
2048 cache = getcache()
2048 cache = getcache()
2049 if not len(cache):
2049 if not len(cache):
2050 ui.write(_(b'cache empty\n'))
2050 ui.write(_(b'cache empty\n'))
2051 else:
2051 else:
2052 ui.write(
2052 ui.write(
2053 _(
2053 _(
2054 b'cache contains %d manifest entries, in order of most to '
2054 b'cache contains %d manifest entries, in order of most to '
2055 b'least recent:\n'
2055 b'least recent:\n'
2056 )
2056 )
2057 % (len(cache),)
2057 % (len(cache),)
2058 )
2058 )
2059 totalsize = 0
2059 totalsize = 0
2060 for nodeid in cache:
2060 for nodeid in cache:
2061 # Use cache.peek to avoid updating the LRU order
2061 # Use cache.peek to avoid updating the LRU order
2062 data = cache.peek(nodeid)
2062 data = cache.peek(nodeid)
2063 size = len(data)
2063 size = len(data)
2064 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2064 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
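# Worked example of the accounting above (illustrative numbers): a cached
# manifest fulltext of 10240 bytes is counted as 10240 + 24 = 10264 bytes,
# the extra 24 covering the 20-byte nodeid and the 4-byte size field.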
2065 ui.write(
2065 ui.write(
2066 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2066 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2067 )
2067 )
2068 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2068 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2069 ui.write(
2069 ui.write(
2070 _(b'total cache data size %s, on-disk %s\n')
2070 _(b'total cache data size %s, on-disk %s\n')
2071 % (util.bytecount(totalsize), util.bytecount(ondisk))
2071 % (util.bytecount(totalsize), util.bytecount(ondisk))
2072 )
2072 )
2073
2073
2074
2074
2075 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2075 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2076 def debugmergestate(ui, repo, *args, **opts):
2076 def debugmergestate(ui, repo, *args, **opts):
2077 """print merge state
2077 """print merge state
2078
2078
2079 Use --verbose to print out information about whether v1 or v2 merge state
2079 Use --verbose to print out information about whether v1 or v2 merge state
2080 was chosen."""
2080 was chosen."""
2081
2081
2082 if ui.verbose:
2082 if ui.verbose:
2083 ms = mergestatemod.mergestate(repo)
2083 ms = mergestatemod.mergestate(repo)
2084
2084
2085 # sort so that reasonable information is on top
2085 # sort so that reasonable information is on top
2086 v1records = ms._readrecordsv1()
2086 v1records = ms._readrecordsv1()
2087 v2records = ms._readrecordsv2()
2087 v2records = ms._readrecordsv2()
2088
2088
2089 if not v1records and not v2records:
2089 if not v1records and not v2records:
2090 pass
2090 pass
2091 elif not v2records:
2091 elif not v2records:
2092 ui.writenoi18n(b'no version 2 merge state\n')
2092 ui.writenoi18n(b'no version 2 merge state\n')
2093 elif ms._v1v2match(v1records, v2records):
2093 elif ms._v1v2match(v1records, v2records):
2094 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2094 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2095 else:
2095 else:
2096 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2096 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2097
2097
2098 opts = pycompat.byteskwargs(opts)
2098 opts = pycompat.byteskwargs(opts)
2099 if not opts[b'template']:
2099 if not opts[b'template']:
2100 opts[b'template'] = (
2100 opts[b'template'] = (
2101 b'{if(commits, "", "no merge state found\n")}'
2101 b'{if(commits, "", "no merge state found\n")}'
2102 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2102 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2103 b'{files % "file: {path} (state \\"{state}\\")\n'
2103 b'{files % "file: {path} (state \\"{state}\\")\n'
2104 b'{if(local_path, "'
2104 b'{if(local_path, "'
2105 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2105 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2106 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2106 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2107 b' other path: {other_path} (node {other_node})\n'
2107 b' other path: {other_path} (node {other_node})\n'
2108 b'")}'
2108 b'")}'
2109 b'{if(rename_side, "'
2109 b'{if(rename_side, "'
2110 b' rename side: {rename_side}\n'
2110 b' rename side: {rename_side}\n'
2111 b' renamed path: {renamed_path}\n'
2111 b' renamed path: {renamed_path}\n'
2112 b'")}'
2112 b'")}'
2113 b'{extras % " extra: {key} = {value}\n"}'
2113 b'{extras % " extra: {key} = {value}\n"}'
2114 b'"}'
2114 b'"}'
2115 b'{extras % "extra: {file} ({key} = {value})\n"}'
2115 b'{extras % "extra: {file} ({key} = {value})\n"}'
2116 )
2116 )
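# Rough shape of the output produced by the default template above (hashes
# and paths are hypothetical placeholders):
#   local: 0123456789abcdef... (label shown in parentheses when available)
#   other: fedcba9876543210...
#   file: foo.txt (state "u")
#    local path: foo.txt (hash ..., flags "")
#    ancestor path: foo.txt (node ...)
#    other path: foo.txt (node ...)
#    extra: ... = ...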
2117
2117
2118 ms = mergestatemod.mergestate.read(repo)
2118 ms = mergestatemod.mergestate.read(repo)
2119
2119
2120 fm = ui.formatter(b'debugmergestate', opts)
2120 fm = ui.formatter(b'debugmergestate', opts)
2121 fm.startitem()
2121 fm.startitem()
2122
2122
2123 fm_commits = fm.nested(b'commits')
2123 fm_commits = fm.nested(b'commits')
2124 if ms.active():
2124 if ms.active():
2125 for name, node, label_index in (
2125 for name, node, label_index in (
2126 (b'local', ms.local, 0),
2126 (b'local', ms.local, 0),
2127 (b'other', ms.other, 1),
2127 (b'other', ms.other, 1),
2128 ):
2128 ):
2129 fm_commits.startitem()
2129 fm_commits.startitem()
2130 fm_commits.data(name=name)
2130 fm_commits.data(name=name)
2131 fm_commits.data(node=hex(node))
2131 fm_commits.data(node=hex(node))
2132 if ms._labels and len(ms._labels) > label_index:
2132 if ms._labels and len(ms._labels) > label_index:
2133 fm_commits.data(label=ms._labels[label_index])
2133 fm_commits.data(label=ms._labels[label_index])
2134 fm_commits.end()
2134 fm_commits.end()
2135
2135
2136 fm_files = fm.nested(b'files')
2136 fm_files = fm.nested(b'files')
2137 if ms.active():
2137 if ms.active():
2138 for f in ms:
2138 for f in ms:
2139 fm_files.startitem()
2139 fm_files.startitem()
2140 fm_files.data(path=f)
2140 fm_files.data(path=f)
2141 state = ms._state[f]
2141 state = ms._state[f]
2142 fm_files.data(state=state[0])
2142 fm_files.data(state=state[0])
2143 if state[0] in (
2143 if state[0] in (
2144 mergestatemod.MERGE_RECORD_UNRESOLVED,
2144 mergestatemod.MERGE_RECORD_UNRESOLVED,
2145 mergestatemod.MERGE_RECORD_RESOLVED,
2145 mergestatemod.MERGE_RECORD_RESOLVED,
2146 ):
2146 ):
2147 fm_files.data(local_key=state[1])
2147 fm_files.data(local_key=state[1])
2148 fm_files.data(local_path=state[2])
2148 fm_files.data(local_path=state[2])
2149 fm_files.data(ancestor_path=state[3])
2149 fm_files.data(ancestor_path=state[3])
2150 fm_files.data(ancestor_node=state[4])
2150 fm_files.data(ancestor_node=state[4])
2151 fm_files.data(other_path=state[5])
2151 fm_files.data(other_path=state[5])
2152 fm_files.data(other_node=state[6])
2152 fm_files.data(other_node=state[6])
2153 fm_files.data(local_flags=state[7])
2153 fm_files.data(local_flags=state[7])
2154 elif state[0] in (
2154 elif state[0] in (
2155 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2155 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2156 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2156 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2157 ):
2157 ):
2158 fm_files.data(renamed_path=state[1])
2158 fm_files.data(renamed_path=state[1])
2159 fm_files.data(rename_side=state[2])
2159 fm_files.data(rename_side=state[2])
2160 fm_extras = fm_files.nested(b'extras')
2160 fm_extras = fm_files.nested(b'extras')
2161 for k, v in sorted(ms.extras(f).items()):
2161 for k, v in sorted(ms.extras(f).items()):
2162 fm_extras.startitem()
2162 fm_extras.startitem()
2163 fm_extras.data(key=k)
2163 fm_extras.data(key=k)
2164 fm_extras.data(value=v)
2164 fm_extras.data(value=v)
2165 fm_extras.end()
2165 fm_extras.end()
2166
2166
2167 fm_files.end()
2167 fm_files.end()
2168
2168
2169 fm_extras = fm.nested(b'extras')
2169 fm_extras = fm.nested(b'extras')
2170 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2170 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2171 if f in ms:
2171 if f in ms:
2172 # If the file is in the mergestate, we have already processed its extras
2172 # If the file is in the mergestate, we have already processed its extras
2173 continue
2173 continue
2174 for k, v in pycompat.iteritems(d):
2174 for k, v in pycompat.iteritems(d):
2175 fm_extras.startitem()
2175 fm_extras.startitem()
2176 fm_extras.data(file=f)
2176 fm_extras.data(file=f)
2177 fm_extras.data(key=k)
2177 fm_extras.data(key=k)
2178 fm_extras.data(value=v)
2178 fm_extras.data(value=v)
2179 fm_extras.end()
2179 fm_extras.end()
2180
2180
2181 fm.end()
2181 fm.end()
2182
2182
2183
2183
2184 @command(b'debugnamecomplete', [], _(b'NAME...'))
2184 @command(b'debugnamecomplete', [], _(b'NAME...'))
2185 def debugnamecomplete(ui, repo, *args):
2185 def debugnamecomplete(ui, repo, *args):
2186 '''complete "names" - tags, open branch names, bookmark names'''
2186 '''complete "names" - tags, open branch names, bookmark names'''
2187
2187
2188 names = set()
2188 names = set()
2189 # since we previously only listed open branches, we will handle that
2189 # since we previously only listed open branches, we will handle that
2190 # specially (after this for loop)
2190 # specially (after this for loop)
2191 for name, ns in pycompat.iteritems(repo.names):
2191 for name, ns in pycompat.iteritems(repo.names):
2192 if name != b'branches':
2192 if name != b'branches':
2193 names.update(ns.listnames(repo))
2193 names.update(ns.listnames(repo))
2194 names.update(
2194 names.update(
2195 tag
2195 tag
2196 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2196 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2197 if not closed
2197 if not closed
2198 )
2198 )
2199 completions = set()
2199 completions = set()
2200 if not args:
2200 if not args:
2201 args = [b'']
2201 args = [b'']
2202 for a in args:
2202 for a in args:
2203 completions.update(n for n in names if n.startswith(a))
2203 completions.update(n for n in names if n.startswith(a))
2204 ui.write(b'\n'.join(sorted(completions)))
2204 ui.write(b'\n'.join(sorted(completions)))
2205 ui.write(b'\n')
2205 ui.write(b'\n')
2206
2206
2207
2207
2208 @command(
2208 @command(
2209 b'debugnodemap',
2209 b'debugnodemap',
2210 [
2210 [
2211 (
2211 (
2212 b'',
2212 b'',
2213 b'dump-new',
2213 b'dump-new',
2214 False,
2214 False,
2215 _(b'write a (new) persistent binary nodemap on stdin'),
2215 _(b'write a (new) persistent binary nodemap on stdout'),
2216 ),
2216 ),
2217 (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
2217 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2218 (
2218 (
2219 b'',
2219 b'',
2220 b'check',
2220 b'check',
2221 False,
2221 False,
2222 _(b'check that the data on disk are correct.'),
2222 _(b'check that the data on disk are correct.'),
2223 ),
2223 ),
2224 (
2224 (
2225 b'',
2225 b'',
2226 b'metadata',
2226 b'metadata',
2227 False,
2227 False,
2228 _(b'display the on disk meta data for the nodemap'),
2228 _(b'display the on disk meta data for the nodemap'),
2229 ),
2229 ),
2230 ],
2230 ],
2231 )
2231 )
2232 def debugnodemap(ui, repo, **opts):
2232 def debugnodemap(ui, repo, **opts):
2233 """write and inspect on disk nodemap"""
2233 """write and inspect on disk nodemap"""
2234 if opts['dump_new']:
2234 if opts['dump_new']:
2235 unfi = repo.unfiltered()
2235 unfi = repo.unfiltered()
2236 cl = unfi.changelog
2236 cl = unfi.changelog
2237 if util.safehasattr(cl.index, "nodemap_data_all"):
2237 if util.safehasattr(cl.index, "nodemap_data_all"):
2238 data = cl.index.nodemap_data_all()
2238 data = cl.index.nodemap_data_all()
2239 else:
2239 else:
2240 data = nodemap.persistent_data(cl.index)
2240 data = nodemap.persistent_data(cl.index)
2241 ui.write(data)
2241 ui.write(data)
2242 elif opts['dump_disk']:
2242 elif opts['dump_disk']:
2243 unfi = repo.unfiltered()
2243 unfi = repo.unfiltered()
2244 cl = unfi.changelog
2244 cl = unfi.changelog
2245 nm_data = nodemap.persisted_data(cl)
2245 nm_data = nodemap.persisted_data(cl)
2246 if nm_data is not None:
2246 if nm_data is not None:
2247 docket, data = nm_data
2247 docket, data = nm_data
2248 ui.write(data[:])
2248 ui.write(data[:])
2249 elif opts['check']:
2249 elif opts['check']:
2250 unfi = repo.unfiltered()
2250 unfi = repo.unfiltered()
2251 cl = unfi.changelog
2251 cl = unfi.changelog
2252 nm_data = nodemap.persisted_data(cl)
2252 nm_data = nodemap.persisted_data(cl)
2253 if nm_data is not None:
2253 if nm_data is not None:
2254 docket, data = nm_data
2254 docket, data = nm_data
2255 return nodemap.check_data(ui, cl.index, data)
2255 return nodemap.check_data(ui, cl.index, data)
2256 elif opts['metadata']:
2256 elif opts['metadata']:
2257 unfi = repo.unfiltered()
2257 unfi = repo.unfiltered()
2258 cl = unfi.changelog
2258 cl = unfi.changelog
2259 nm_data = nodemap.persisted_data(cl)
2259 nm_data = nodemap.persisted_data(cl)
2260 if nm_data is not None:
2260 if nm_data is not None:
2261 docket, data = nm_data
2261 docket, data = nm_data
2262 ui.write((b"uid: %s\n") % docket.uid)
2262 ui.write((b"uid: %s\n") % docket.uid)
2263 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2263 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2264 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2264 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2265 ui.write((b"data-length: %d\n") % docket.data_length)
2265 ui.write((b"data-length: %d\n") % docket.data_length)
2266 ui.write((b"data-unused: %d\n") % docket.data_unused)
2266 ui.write((b"data-unused: %d\n") % docket.data_unused)
2267 unused_perc = docket.data_unused * 100.0 / docket.data_length
2267 unused_perc = docket.data_unused * 100.0 / docket.data_length
2268 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2268 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2269
2269
2270
2270
2271 @command(
2271 @command(
2272 b'debugobsolete',
2272 b'debugobsolete',
2273 [
2273 [
2274 (b'', b'flags', 0, _(b'markers flag')),
2274 (b'', b'flags', 0, _(b'markers flag')),
2275 (
2275 (
2276 b'',
2276 b'',
2277 b'record-parents',
2277 b'record-parents',
2278 False,
2278 False,
2279 _(b'record parent information for the precursor'),
2279 _(b'record parent information for the precursor'),
2280 ),
2280 ),
2281 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2281 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2282 (
2282 (
2283 b'',
2283 b'',
2284 b'exclusive',
2284 b'exclusive',
2285 False,
2285 False,
2286 _(b'restrict display to markers only relevant to REV'),
2286 _(b'restrict display to markers only relevant to REV'),
2287 ),
2287 ),
2288 (b'', b'index', False, _(b'display index of the marker')),
2288 (b'', b'index', False, _(b'display index of the marker')),
2289 (b'', b'delete', [], _(b'delete markers specified by indices')),
2289 (b'', b'delete', [], _(b'delete markers specified by indices')),
2290 ]
2290 ]
2291 + cmdutil.commitopts2
2291 + cmdutil.commitopts2
2292 + cmdutil.formatteropts,
2292 + cmdutil.formatteropts,
2293 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2293 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2294 )
2294 )
2295 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2295 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2296 """create arbitrary obsolete marker
2296 """create arbitrary obsolete marker
2297
2297
2298 With no arguments, displays the list of obsolescence markers."""
2298 With no arguments, displays the list of obsolescence markers."""
2299
2299
2300 opts = pycompat.byteskwargs(opts)
2300 opts = pycompat.byteskwargs(opts)
2301
2301
2302 def parsenodeid(s):
2302 def parsenodeid(s):
2303 try:
2303 try:
2304 # We do not use revsingle/revrange functions here to accept
2304 # We do not use revsingle/revrange functions here to accept
2305 # arbitrary node identifiers, possibly not present in the
2305 # arbitrary node identifiers, possibly not present in the
2306 # local repository.
2306 # local repository.
2307 n = bin(s)
2307 n = bin(s)
2308 if len(n) != len(nullid):
2308 if len(n) != len(nullid):
2309 raise TypeError()
2309 raise TypeError()
2310 return n
2310 return n
2311 except TypeError:
2311 except TypeError:
2312 raise error.InputError(
2312 raise error.InputError(
2313 b'changeset references must be full hexadecimal '
2313 b'changeset references must be full hexadecimal '
2314 b'node identifiers'
2314 b'node identifiers'
2315 )
2315 )
2316
2316
2317 if opts.get(b'delete'):
2317 if opts.get(b'delete'):
2318 indices = []
2318 indices = []
2319 for v in opts.get(b'delete'):
2319 for v in opts.get(b'delete'):
2320 try:
2320 try:
2321 indices.append(int(v))
2321 indices.append(int(v))
2322 except ValueError:
2322 except ValueError:
2323 raise error.InputError(
2323 raise error.InputError(
2324 _(b'invalid index value: %r') % v,
2324 _(b'invalid index value: %r') % v,
2325 hint=_(b'use integers for indices'),
2325 hint=_(b'use integers for indices'),
2326 )
2326 )
2327
2327
2328 if repo.currenttransaction():
2328 if repo.currenttransaction():
2329 raise error.Abort(
2329 raise error.Abort(
2330 _(b'cannot delete obsmarkers in the middle of a transaction.')
2330 _(b'cannot delete obsmarkers in the middle of a transaction.')
2331 )
2331 )
2332
2332
2333 with repo.lock():
2333 with repo.lock():
2334 n = repair.deleteobsmarkers(repo.obsstore, indices)
2334 n = repair.deleteobsmarkers(repo.obsstore, indices)
2335 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2335 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2336
2336
2337 return
2337 return
2338
2338
2339 if precursor is not None:
2339 if precursor is not None:
2340 if opts[b'rev']:
2340 if opts[b'rev']:
2341 raise error.InputError(
2341 raise error.InputError(
2342 b'cannot select revision when creating marker'
2342 b'cannot select revision when creating marker'
2343 )
2343 )
2344 metadata = {}
2344 metadata = {}
2345 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2345 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2346 succs = tuple(parsenodeid(succ) for succ in successors)
2346 succs = tuple(parsenodeid(succ) for succ in successors)
2347 l = repo.lock()
2347 l = repo.lock()
2348 try:
2348 try:
2349 tr = repo.transaction(b'debugobsolete')
2349 tr = repo.transaction(b'debugobsolete')
2350 try:
2350 try:
2351 date = opts.get(b'date')
2351 date = opts.get(b'date')
2352 if date:
2352 if date:
2353 date = dateutil.parsedate(date)
2353 date = dateutil.parsedate(date)
2354 else:
2354 else:
2355 date = None
2355 date = None
2356 prec = parsenodeid(precursor)
2356 prec = parsenodeid(precursor)
2357 parents = None
2357 parents = None
2358 if opts[b'record_parents']:
2358 if opts[b'record_parents']:
2359 if prec not in repo.unfiltered():
2359 if prec not in repo.unfiltered():
2360 raise error.Abort(
2360 raise error.Abort(
2361 b'cannot use --record-parents on '
2361 b'cannot use --record-parents on '
2362 b'unknown changesets'
2362 b'unknown changesets'
2363 )
2363 )
2364 parents = repo.unfiltered()[prec].parents()
2364 parents = repo.unfiltered()[prec].parents()
2365 parents = tuple(p.node() for p in parents)
2365 parents = tuple(p.node() for p in parents)
2366 repo.obsstore.create(
2366 repo.obsstore.create(
2367 tr,
2367 tr,
2368 prec,
2368 prec,
2369 succs,
2369 succs,
2370 opts[b'flags'],
2370 opts[b'flags'],
2371 parents=parents,
2371 parents=parents,
2372 date=date,
2372 date=date,
2373 metadata=metadata,
2373 metadata=metadata,
2374 ui=ui,
2374 ui=ui,
2375 )
2375 )
2376 tr.close()
2376 tr.close()
2377 except ValueError as exc:
2377 except ValueError as exc:
2378 raise error.Abort(
2378 raise error.Abort(
2379 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2379 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2380 )
2380 )
2381 finally:
2381 finally:
2382 tr.release()
2382 tr.release()
2383 finally:
2383 finally:
2384 l.release()
2384 l.release()
2385 else:
2385 else:
2386 if opts[b'rev']:
2386 if opts[b'rev']:
2387 revs = scmutil.revrange(repo, opts[b'rev'])
2387 revs = scmutil.revrange(repo, opts[b'rev'])
2388 nodes = [repo[r].node() for r in revs]
2388 nodes = [repo[r].node() for r in revs]
2389 markers = list(
2389 markers = list(
2390 obsutil.getmarkers(
2390 obsutil.getmarkers(
2391 repo, nodes=nodes, exclusive=opts[b'exclusive']
2391 repo, nodes=nodes, exclusive=opts[b'exclusive']
2392 )
2392 )
2393 )
2393 )
2394 markers.sort(key=lambda x: x._data)
2394 markers.sort(key=lambda x: x._data)
2395 else:
2395 else:
2396 markers = obsutil.getmarkers(repo)
2396 markers = obsutil.getmarkers(repo)
2397
2397
2398 markerstoiter = markers
2398 markerstoiter = markers
2399 isrelevant = lambda m: True
2399 isrelevant = lambda m: True
2400 if opts.get(b'rev') and opts.get(b'index'):
2400 if opts.get(b'rev') and opts.get(b'index'):
2401 markerstoiter = obsutil.getmarkers(repo)
2401 markerstoiter = obsutil.getmarkers(repo)
2402 markerset = set(markers)
2402 markerset = set(markers)
2403 isrelevant = lambda m: m in markerset
2403 isrelevant = lambda m: m in markerset
2404
2404
2405 fm = ui.formatter(b'debugobsolete', opts)
2405 fm = ui.formatter(b'debugobsolete', opts)
2406 for i, m in enumerate(markerstoiter):
2406 for i, m in enumerate(markerstoiter):
2407 if not isrelevant(m):
2407 if not isrelevant(m):
2408 # marker can be irrelevant when we're iterating over a set
2408 # marker can be irrelevant when we're iterating over a set
2409 # of markers (markerstoiter) which is bigger than the set
2409 # of markers (markerstoiter) which is bigger than the set
2410 # of markers we want to display (markers)
2410 # of markers we want to display (markers)
2411 # this can happen if both --index and --rev options are
2411 # this can happen if both --index and --rev options are
2412 # provided and thus we need to iterate over all of the markers
2412 # provided and thus we need to iterate over all of the markers
2413 # to get the correct indices, but only display the ones that
2413 # to get the correct indices, but only display the ones that
2414 # are relevant to --rev value
2414 # are relevant to --rev value
2415 continue
2415 continue
2416 fm.startitem()
2416 fm.startitem()
2417 ind = i if opts.get(b'index') else None
2417 ind = i if opts.get(b'index') else None
2418 cmdutil.showmarker(fm, m, index=ind)
2418 cmdutil.showmarker(fm, m, index=ind)
2419 fm.end()
2419 fm.end()
2420
2420
2421
2421
2422 @command(
2422 @command(
2423 b'debugp1copies',
2423 b'debugp1copies',
2424 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2424 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2425 _(b'[-r REV]'),
2425 _(b'[-r REV]'),
2426 )
2426 )
2427 def debugp1copies(ui, repo, **opts):
2427 def debugp1copies(ui, repo, **opts):
2428 """dump copy information compared to p1"""
2428 """dump copy information compared to p1"""
2429
2429
2430 opts = pycompat.byteskwargs(opts)
2430 opts = pycompat.byteskwargs(opts)
2431 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2431 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2432 for dst, src in ctx.p1copies().items():
2432 for dst, src in ctx.p1copies().items():
2433 ui.write(b'%s -> %s\n' % (src, dst))
2433 ui.write(b'%s -> %s\n' % (src, dst))
2434
2434
2435
2435
2436 @command(
2436 @command(
2437 b'debugp2copies',
2437 b'debugp2copies',
2438 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2438 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2439 _(b'[-r REV]'),
2439 _(b'[-r REV]'),
2440 )
2440 )
2441 def debugp2copies(ui, repo, **opts):
2441 def debugp2copies(ui, repo, **opts):
2442 """dump copy information compared to p2"""
2442 """dump copy information compared to p2"""
2443
2443
2444 opts = pycompat.byteskwargs(opts)
2444 opts = pycompat.byteskwargs(opts)
2445 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2445 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2446 for dst, src in ctx.p2copies().items():
2446 for dst, src in ctx.p2copies().items():
2447 ui.write(b'%s -> %s\n' % (src, dst))
2447 ui.write(b'%s -> %s\n' % (src, dst))
2448
2448
2449
2449
2450 @command(
2450 @command(
2451 b'debugpathcomplete',
2451 b'debugpathcomplete',
2452 [
2452 [
2453 (b'f', b'full', None, _(b'complete an entire path')),
2453 (b'f', b'full', None, _(b'complete an entire path')),
2454 (b'n', b'normal', None, _(b'show only normal files')),
2454 (b'n', b'normal', None, _(b'show only normal files')),
2455 (b'a', b'added', None, _(b'show only added files')),
2455 (b'a', b'added', None, _(b'show only added files')),
2456 (b'r', b'removed', None, _(b'show only removed files')),
2456 (b'r', b'removed', None, _(b'show only removed files')),
2457 ],
2457 ],
2458 _(b'FILESPEC...'),
2458 _(b'FILESPEC...'),
2459 )
2459 )
2460 def debugpathcomplete(ui, repo, *specs, **opts):
2460 def debugpathcomplete(ui, repo, *specs, **opts):
2461 """complete part or all of a tracked path
2461 """complete part or all of a tracked path
2462
2462
2463 This command supports shells that offer path name completion. It
2463 This command supports shells that offer path name completion. It
2464 currently completes only files already known to the dirstate.
2464 currently completes only files already known to the dirstate.
2465
2465
2466 Completion extends only to the next path segment unless
2466 Completion extends only to the next path segment unless
2467 --full is specified, in which case entire paths are used."""
2467 --full is specified, in which case entire paths are used."""
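# Illustrative behaviour (hypothetical tracked files "src/a.py" and
# "src/b/c.py"): completing "src/" yields "src/a.py" and the directory
# "src/b" (next segment only), while --full yields both complete paths.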
2468
2468
2469 def complete(path, acceptable):
2469 def complete(path, acceptable):
2470 dirstate = repo.dirstate
2470 dirstate = repo.dirstate
2471 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2471 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2472 rootdir = repo.root + pycompat.ossep
2472 rootdir = repo.root + pycompat.ossep
2473 if spec != repo.root and not spec.startswith(rootdir):
2473 if spec != repo.root and not spec.startswith(rootdir):
2474 return [], []
2474 return [], []
2475 if os.path.isdir(spec):
2475 if os.path.isdir(spec):
2476 spec += b'/'
2476 spec += b'/'
2477 spec = spec[len(rootdir) :]
2477 spec = spec[len(rootdir) :]
2478 fixpaths = pycompat.ossep != b'/'
2478 fixpaths = pycompat.ossep != b'/'
2479 if fixpaths:
2479 if fixpaths:
2480 spec = spec.replace(pycompat.ossep, b'/')
2480 spec = spec.replace(pycompat.ossep, b'/')
2481 speclen = len(spec)
2481 speclen = len(spec)
2482 fullpaths = opts['full']
2482 fullpaths = opts['full']
2483 files, dirs = set(), set()
2483 files, dirs = set(), set()
2484 adddir, addfile = dirs.add, files.add
2484 adddir, addfile = dirs.add, files.add
2485 for f, st in pycompat.iteritems(dirstate):
2485 for f, st in pycompat.iteritems(dirstate):
2486 if f.startswith(spec) and st[0] in acceptable:
2486 if f.startswith(spec) and st[0] in acceptable:
2487 if fixpaths:
2487 if fixpaths:
2488 f = f.replace(b'/', pycompat.ossep)
2488 f = f.replace(b'/', pycompat.ossep)
2489 if fullpaths:
2489 if fullpaths:
2490 addfile(f)
2490 addfile(f)
2491 continue
2491 continue
2492 s = f.find(pycompat.ossep, speclen)
2492 s = f.find(pycompat.ossep, speclen)
2493 if s >= 0:
2493 if s >= 0:
2494 adddir(f[:s])
2494 adddir(f[:s])
2495 else:
2495 else:
2496 addfile(f)
2496 addfile(f)
2497 return files, dirs
2497 return files, dirs
2498
2498
2499 acceptable = b''
2499 acceptable = b''
2500 if opts['normal']:
2500 if opts['normal']:
2501 acceptable += b'nm'
2501 acceptable += b'nm'
2502 if opts['added']:
2502 if opts['added']:
2503 acceptable += b'a'
2503 acceptable += b'a'
2504 if opts['removed']:
2504 if opts['removed']:
2505 acceptable += b'r'
2505 acceptable += b'r'
2506 cwd = repo.getcwd()
2506 cwd = repo.getcwd()
2507 if not specs:
2507 if not specs:
2508 specs = [b'.']
2508 specs = [b'.']
2509
2509
2510 files, dirs = set(), set()
2510 files, dirs = set(), set()
2511 for spec in specs:
2511 for spec in specs:
2512 f, d = complete(spec, acceptable or b'nmar')
2512 f, d = complete(spec, acceptable or b'nmar')
2513 files.update(f)
2513 files.update(f)
2514 dirs.update(d)
2514 dirs.update(d)
2515 files.update(dirs)
2515 files.update(dirs)
2516 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2516 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2517 ui.write(b'\n')
2517 ui.write(b'\n')
2518
2518
2519
2519
2520 @command(
2520 @command(
2521 b'debugpathcopies',
2521 b'debugpathcopies',
2522 cmdutil.walkopts,
2522 cmdutil.walkopts,
2523 b'hg debugpathcopies REV1 REV2 [FILE]',
2523 b'hg debugpathcopies REV1 REV2 [FILE]',
2524 inferrepo=True,
2524 inferrepo=True,
2525 )
2525 )
2526 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2526 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2527 """show copies between two revisions"""
2527 """show copies between two revisions"""
2528 ctx1 = scmutil.revsingle(repo, rev1)
2528 ctx1 = scmutil.revsingle(repo, rev1)
2529 ctx2 = scmutil.revsingle(repo, rev2)
2529 ctx2 = scmutil.revsingle(repo, rev2)
2530 m = scmutil.match(ctx1, pats, opts)
2530 m = scmutil.match(ctx1, pats, opts)
2531 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2531 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2532 ui.write(b'%s -> %s\n' % (src, dst))
2532 ui.write(b'%s -> %s\n' % (src, dst))
2533
2533
2534
2534
2535 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2535 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2536 def debugpeer(ui, path):
2536 def debugpeer(ui, path):
2537 """establish a connection to a peer repository"""
2537 """establish a connection to a peer repository"""
2538 # Always enable peer request logging. Requires --debug to display
2538 # Always enable peer request logging. Requires --debug to display
2539 # though.
2539 # though.
2540 overrides = {
2540 overrides = {
2541 (b'devel', b'debug.peer-request'): True,
2541 (b'devel', b'debug.peer-request'): True,
2542 }
2542 }
2543
2543
2544 with ui.configoverride(overrides):
2544 with ui.configoverride(overrides):
2545 peer = hg.peer(ui, {}, path)
2545 peer = hg.peer(ui, {}, path)
2546
2546
2547 local = peer.local() is not None
2547 local = peer.local() is not None
2548 canpush = peer.canpush()
2548 canpush = peer.canpush()
2549
2549
2550 ui.write(_(b'url: %s\n') % peer.url())
2550 ui.write(_(b'url: %s\n') % peer.url())
2551 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2551 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2552 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2552 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2553
2553
2554
2554
2555 @command(
2555 @command(
2556 b'debugpickmergetool',
2556 b'debugpickmergetool',
2557 [
2557 [
2558 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2558 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2559 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2559 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2560 ]
2560 ]
2561 + cmdutil.walkopts
2561 + cmdutil.walkopts
2562 + cmdutil.mergetoolopts,
2562 + cmdutil.mergetoolopts,
2563 _(b'[PATTERN]...'),
2563 _(b'[PATTERN]...'),
2564 inferrepo=True,
2564 inferrepo=True,
2565 )
2565 )
2566 def debugpickmergetool(ui, repo, *pats, **opts):
2566 def debugpickmergetool(ui, repo, *pats, **opts):
2567 """examine which merge tool is chosen for specified file
2567 """examine which merge tool is chosen for specified file
2568
2568
2569 As described in :hg:`help merge-tools`, Mercurial examines
2569 As described in :hg:`help merge-tools`, Mercurial examines
2570 configurations below in this order to decide which merge tool is
2570 configurations below in this order to decide which merge tool is
2571 chosen for the specified file.
2571 chosen for the specified file.
2572
2572
2573 1. ``--tool`` option
2573 1. ``--tool`` option
2574 2. ``HGMERGE`` environment variable
2574 2. ``HGMERGE`` environment variable
2575 3. configurations in ``merge-patterns`` section
2575 3. configurations in ``merge-patterns`` section
2576 4. configuration of ``ui.merge``
2576 4. configuration of ``ui.merge``
2577 5. configurations in ``merge-tools`` section
2577 5. configurations in ``merge-tools`` section
2578 6. ``hgmerge`` tool (for historical reasons only)
2578 6. ``hgmerge`` tool (for historical reasons only)
2579 7. default tool for fallback (``:merge`` or ``:prompt``)
2579 7. default tool for fallback (``:merge`` or ``:prompt``)
2580
2580
2581 This command writes out examination result in the style below::
2581 This command writes out examination result in the style below::
2582
2582
2583 FILE = MERGETOOL
2583 FILE = MERGETOOL
2584
2584
2585 By default, all files known in the first parent context of the
2585 By default, all files known in the first parent context of the
2586 working directory are examined. Use file patterns and/or -I/-X
2586 working directory are examined. Use file patterns and/or -I/-X
2587 options to limit target files. -r/--rev is also useful to examine
2587 options to limit target files. -r/--rev is also useful to examine
2588 files in another context without actually updating to it.
2588 files in another context without actually updating to it.
2589
2589
2590 With --debug, this command shows warning messages while matching
2590 With --debug, this command shows warning messages while matching
2591 against ``merge-patterns`` and so on, too. It is recommended to
2591 against ``merge-patterns`` and so on, too. It is recommended to
2592 use this option with explicit file patterns and/or -I/-X options,
2592 use this option with explicit file patterns and/or -I/-X options,
2593 because this option increases the amount of output per file according
2593 because this option increases the amount of output per file according
2594 to configurations in hgrc.
2594 to configurations in hgrc.
2595
2595
2596 With -v/--verbose, this command shows configurations below at
2596 With -v/--verbose, this command shows configurations below at
2597 first (only if specified).
2597 first (only if specified).
2598
2598
2599 - ``--tool`` option
2599 - ``--tool`` option
2600 - ``HGMERGE`` environment variable
2600 - ``HGMERGE`` environment variable
2601 - configuration of ``ui.merge``
2601 - configuration of ``ui.merge``
2602
2602
2603 If merge tool is chosen before matching against
2603 If merge tool is chosen before matching against
2604 ``merge-patterns``, this command can't show any helpful
2604 ``merge-patterns``, this command can't show any helpful
2605 information, even with --debug. In such a case, the information above is
2605 information, even with --debug. In such a case, the information above is
2606 useful to know why a merge tool is chosen.
2606 useful to know why a merge tool is chosen.
2607 """
2607 """
2608 opts = pycompat.byteskwargs(opts)
2608 opts = pycompat.byteskwargs(opts)
2609 overrides = {}
2609 overrides = {}
2610 if opts[b'tool']:
2610 if opts[b'tool']:
2611 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2611 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2612 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2612 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2613
2613
2614 with ui.configoverride(overrides, b'debugmergepatterns'):
2614 with ui.configoverride(overrides, b'debugmergepatterns'):
2615 hgmerge = encoding.environ.get(b"HGMERGE")
2615 hgmerge = encoding.environ.get(b"HGMERGE")
2616 if hgmerge is not None:
2616 if hgmerge is not None:
2617 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2617 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2618 uimerge = ui.config(b"ui", b"merge")
2618 uimerge = ui.config(b"ui", b"merge")
2619 if uimerge:
2619 if uimerge:
2620 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2620 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2621
2621
2622 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2622 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2623 m = scmutil.match(ctx, pats, opts)
2623 m = scmutil.match(ctx, pats, opts)
2624 changedelete = opts[b'changedelete']
2624 changedelete = opts[b'changedelete']
2625 for path in ctx.walk(m):
2625 for path in ctx.walk(m):
2626 fctx = ctx[path]
2626 fctx = ctx[path]
2627 try:
2627 try:
2628 if not ui.debugflag:
2628 if not ui.debugflag:
2629 ui.pushbuffer(error=True)
2629 ui.pushbuffer(error=True)
2630 tool, toolpath = filemerge._picktool(
2630 tool, toolpath = filemerge._picktool(
2631 repo,
2631 repo,
2632 ui,
2632 ui,
2633 path,
2633 path,
2634 fctx.isbinary(),
2634 fctx.isbinary(),
2635 b'l' in fctx.flags(),
2635 b'l' in fctx.flags(),
2636 changedelete,
2636 changedelete,
2637 )
2637 )
2638 finally:
2638 finally:
2639 if not ui.debugflag:
2639 if not ui.debugflag:
2640 ui.popbuffer()
2640 ui.popbuffer()
2641 ui.write(b'%s = %s\n' % (path, tool))
2641 ui.write(b'%s = %s\n' % (path, tool))
2642
2642
2643
2643
2644 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2644 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2645 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2645 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2646 """access the pushkey key/value protocol
2646 """access the pushkey key/value protocol
2647
2647
2648 With two args, list the keys in the given namespace.
2648 With two args, list the keys in the given namespace.
2649
2649
2650 With five args, set a key to new if it currently is set to old.
2650 With five args, set a key to new if it currently is set to old.
2651 Reports success or failure.
2651 Reports success or failure.
2652 """
2652 """
2653
2653
2654 target = hg.peer(ui, {}, repopath)
2654 target = hg.peer(ui, {}, repopath)
2655 if keyinfo:
2655 if keyinfo:
2656 key, old, new = keyinfo
2656 key, old, new = keyinfo
2657 with target.commandexecutor() as e:
2657 with target.commandexecutor() as e:
2658 r = e.callcommand(
2658 r = e.callcommand(
2659 b'pushkey',
2659 b'pushkey',
2660 {
2660 {
2661 b'namespace': namespace,
2661 b'namespace': namespace,
2662 b'key': key,
2662 b'key': key,
2663 b'old': old,
2663 b'old': old,
2664 b'new': new,
2664 b'new': new,
2665 },
2665 },
2666 ).result()
2666 ).result()
2667
2667
2668 ui.status(pycompat.bytestr(r) + b'\n')
2668 ui.status(pycompat.bytestr(r) + b'\n')
2669 return not r
2669 return not r
2670 else:
2670 else:
2671 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2671 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2672 ui.write(
2672 ui.write(
2673 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2673 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2674 )
2674 )
2675
2675
2676
2676
2677 @command(b'debugpvec', [], _(b'A B'))
2677 @command(b'debugpvec', [], _(b'A B'))
2678 def debugpvec(ui, repo, a, b=None):
2678 def debugpvec(ui, repo, a, b=None):
2679 ca = scmutil.revsingle(repo, a)
2679 ca = scmutil.revsingle(repo, a)
2680 cb = scmutil.revsingle(repo, b)
2680 cb = scmutil.revsingle(repo, b)
2681 pa = pvec.ctxpvec(ca)
2681 pa = pvec.ctxpvec(ca)
2682 pb = pvec.ctxpvec(cb)
2682 pb = pvec.ctxpvec(cb)
2683 if pa == pb:
2683 if pa == pb:
2684 rel = b"="
2684 rel = b"="
2685 elif pa > pb:
2685 elif pa > pb:
2686 rel = b">"
2686 rel = b">"
2687 elif pa < pb:
2687 elif pa < pb:
2688 rel = b"<"
2688 rel = b"<"
2689 elif pa | pb:
2689 elif pa | pb:
2690 rel = b"|"
2690 rel = b"|"
2691 ui.write(_(b"a: %s\n") % pa)
2691 ui.write(_(b"a: %s\n") % pa)
2692 ui.write(_(b"b: %s\n") % pb)
2692 ui.write(_(b"b: %s\n") % pb)
2693 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2693 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2694 ui.write(
2694 ui.write(
2695 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2695 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2696 % (
2696 % (
2697 abs(pa._depth - pb._depth),
2697 abs(pa._depth - pb._depth),
2698 pvec._hamming(pa._vec, pb._vec),
2698 pvec._hamming(pa._vec, pb._vec),
2699 pa.distance(pb),
2699 pa.distance(pb),
2700 rel,
2700 rel,
2701 )
2701 )
2702 )
2702 )
2703
2703
2704
2704
2705 @command(
2705 @command(
2706 b'debugrebuilddirstate|debugrebuildstate',
2706 b'debugrebuilddirstate|debugrebuildstate',
2707 [
2707 [
2708 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2708 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2709 (
2709 (
2710 b'',
2710 b'',
2711 b'minimal',
2711 b'minimal',
2712 None,
2712 None,
2713 _(
2713 _(
2714 b'only rebuild files that are inconsistent with '
2714 b'only rebuild files that are inconsistent with '
2715 b'the working copy parent'
2715 b'the working copy parent'
2716 ),
2716 ),
2717 ),
2717 ),
2718 ],
2718 ],
2719 _(b'[-r REV]'),
2719 _(b'[-r REV]'),
2720 )
2720 )
2721 def debugrebuilddirstate(ui, repo, rev, **opts):
2721 def debugrebuilddirstate(ui, repo, rev, **opts):
2722 """rebuild the dirstate as it would look like for the given revision
2722 """rebuild the dirstate as it would look like for the given revision
2723
2723
2724 If no revision is specified, the first current parent will be used.
2724 If no revision is specified, the first current parent will be used.
2725
2725
2726 The dirstate will be set to the files of the given revision.
2726 The dirstate will be set to the files of the given revision.
2727 The actual working directory content or existing dirstate
2727 The actual working directory content or existing dirstate
2728 information such as adds or removes is not considered.
2728 information such as adds or removes is not considered.
2729
2729
2730 ``minimal`` will only rebuild the dirstate status for files that claim to be
2730 ``minimal`` will only rebuild the dirstate status for files that claim to be
2731 tracked but are not in the parent manifest, or that exist in the parent
2731 tracked but are not in the parent manifest, or that exist in the parent
2732 manifest but are not in the dirstate. It will not change adds, removes, or
2732 manifest but are not in the dirstate. It will not change adds, removes, or
2733 modified files that are in the working copy parent.
2733 modified files that are in the working copy parent.
2734
2734
2735 One use of this command is to make the next :hg:`status` invocation
2735 One use of this command is to make the next :hg:`status` invocation
2736 check the actual file content.
2736 check the actual file content.
2737 """
2737 """
2738 ctx = scmutil.revsingle(repo, rev)
2738 ctx = scmutil.revsingle(repo, rev)
2739 with repo.wlock():
2739 with repo.wlock():
2740 dirstate = repo.dirstate
2740 dirstate = repo.dirstate
2741 changedfiles = None
2741 changedfiles = None
2742 # See command doc for what minimal does.
2742 # See command doc for what minimal does.
2743 if opts.get('minimal'):
2743 if opts.get('minimal'):
2744 manifestfiles = set(ctx.manifest().keys())
2744 manifestfiles = set(ctx.manifest().keys())
2745 dirstatefiles = set(dirstate)
2745 dirstatefiles = set(dirstate)
2746 manifestonly = manifestfiles - dirstatefiles
2746 manifestonly = manifestfiles - dirstatefiles
2747 dsonly = dirstatefiles - manifestfiles
2747 dsonly = dirstatefiles - manifestfiles
2748 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2748 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2749 changedfiles = manifestonly | dsnotadded
2749 changedfiles = manifestonly | dsnotadded
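# Sketch of the set algebra above with hypothetical names: manifest {a, b},
# dirstate {b, c (added), d (removed)} gives manifestonly = {a},
# dsonly = {c, d}, dsnotadded = {d}, so only {a, d} are rebuilt.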
2750
2750
2751 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2751 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2752
2752
2753
2753
2754 @command(b'debugrebuildfncache', [], b'')
2754 @command(b'debugrebuildfncache', [], b'')
2755 def debugrebuildfncache(ui, repo):
2755 def debugrebuildfncache(ui, repo):
2756 """rebuild the fncache file"""
2756 """rebuild the fncache file"""
2757 repair.rebuildfncache(ui, repo)
2757 repair.rebuildfncache(ui, repo)
2758
2758
2759
2759
2760 @command(
2760 @command(
2761 b'debugrename',
2761 b'debugrename',
2762 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2762 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2763 _(b'[-r REV] [FILE]...'),
2763 _(b'[-r REV] [FILE]...'),
2764 )
2764 )
2765 def debugrename(ui, repo, *pats, **opts):
2765 def debugrename(ui, repo, *pats, **opts):
2766 """dump rename information"""
2766 """dump rename information"""
2767
2767
2768 opts = pycompat.byteskwargs(opts)
2768 opts = pycompat.byteskwargs(opts)
2769 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2769 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2770 m = scmutil.match(ctx, pats, opts)
2770 m = scmutil.match(ctx, pats, opts)
2771 for abs in ctx.walk(m):
2771 for abs in ctx.walk(m):
2772 fctx = ctx[abs]
2772 fctx = ctx[abs]
2773 o = fctx.filelog().renamed(fctx.filenode())
2773 o = fctx.filelog().renamed(fctx.filenode())
2774 rel = repo.pathto(abs)
2774 rel = repo.pathto(abs)
2775 if o:
2775 if o:
2776 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2776 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2777 else:
2777 else:
2778 ui.write(_(b"%s not renamed\n") % rel)
2778 ui.write(_(b"%s not renamed\n") % rel)
2779
2779
2780
2780
2781 @command(b'debugrequires|debugrequirements', [], b'')
2781 @command(b'debugrequires|debugrequirements', [], b'')
2782 def debugrequirements(ui, repo):
2782 def debugrequirements(ui, repo):
2783 """ print the current repo requirements """
2783 """ print the current repo requirements """
2784 for r in sorted(repo.requirements):
2784 for r in sorted(repo.requirements):
2785 ui.write(b"%s\n" % r)
2785 ui.write(b"%s\n" % r)
2786
2786
2787
2787
2788 @command(
2788 @command(
2789 b'debugrevlog',
2789 b'debugrevlog',
2790 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2790 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2791 _(b'-c|-m|FILE'),
2791 _(b'-c|-m|FILE'),
2792 optionalrepo=True,
2792 optionalrepo=True,
2793 )
2793 )
2794 def debugrevlog(ui, repo, file_=None, **opts):
2794 def debugrevlog(ui, repo, file_=None, **opts):
2795 """show data and statistics about a revlog"""
2795 """show data and statistics about a revlog"""
2796 opts = pycompat.byteskwargs(opts)
2796 opts = pycompat.byteskwargs(opts)
2797 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2797 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2798
2798
2799 if opts.get(b"dump"):
2799 if opts.get(b"dump"):
2800 numrevs = len(r)
2800 numrevs = len(r)
2801 ui.write(
2801 ui.write(
2802 (
2802 (
2803 b"# rev p1rev p2rev start end deltastart base p1 p2"
2803 b"# rev p1rev p2rev start end deltastart base p1 p2"
2804 b" rawsize totalsize compression heads chainlen\n"
2804 b" rawsize totalsize compression heads chainlen\n"
2805 )
2805 )
2806 )
2806 )
2807 ts = 0
2807 ts = 0
2808 heads = set()
2808 heads = set()
2809
2809
2810 for rev in pycompat.xrange(numrevs):
2810 for rev in pycompat.xrange(numrevs):
2811 dbase = r.deltaparent(rev)
2811 dbase = r.deltaparent(rev)
2812 if dbase == -1:
2812 if dbase == -1:
2813 dbase = rev
2813 dbase = rev
2814 cbase = r.chainbase(rev)
2814 cbase = r.chainbase(rev)
2815 clen = r.chainlen(rev)
2815 clen = r.chainlen(rev)
2816 p1, p2 = r.parentrevs(rev)
2816 p1, p2 = r.parentrevs(rev)
2817 rs = r.rawsize(rev)
2817 rs = r.rawsize(rev)
2818 ts = ts + rs
2818 ts = ts + rs
2819 heads -= set(r.parentrevs(rev))
2819 heads -= set(r.parentrevs(rev))
2820 heads.add(rev)
2820 heads.add(rev)
2821 try:
2821 try:
2822 compression = ts / r.end(rev)
2822 compression = ts / r.end(rev)
2823 except ZeroDivisionError:
2823 except ZeroDivisionError:
2824 compression = 0
2824 compression = 0
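# Illustrative reading of the "compression" column: if the cumulative raw
# size so far (ts) is 3,000,000 bytes and this revision's data ends at
# offset 1,000,000, the column shows 3 (raw bytes per stored byte).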
2825 ui.write(
2825 ui.write(
2826 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2826 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2827 b"%11d %5d %8d\n"
2827 b"%11d %5d %8d\n"
2828 % (
2828 % (
2829 rev,
2829 rev,
2830 p1,
2830 p1,
2831 p2,
2831 p2,
2832 r.start(rev),
2832 r.start(rev),
2833 r.end(rev),
2833 r.end(rev),
2834 r.start(dbase),
2834 r.start(dbase),
2835 r.start(cbase),
2835 r.start(cbase),
2836 r.start(p1),
2836 r.start(p1),
2837 r.start(p2),
2837 r.start(p2),
2838 rs,
2838 rs,
2839 ts,
2839 ts,
2840 compression,
2840 compression,
2841 len(heads),
2841 len(heads),
2842 clen,
2842 clen,
2843 )
2843 )
2844 )
2844 )
2845 return 0
2845 return 0
2846
2846
2847 v = r.version
2847 v = r.version
2848 format = v & 0xFFFF
2848 format = v & 0xFFFF
2849 flags = []
2849 flags = []
2850 gdelta = False
2850 gdelta = False
2851 if v & revlog.FLAG_INLINE_DATA:
2851 if v & revlog.FLAG_INLINE_DATA:
2852 flags.append(b'inline')
2852 flags.append(b'inline')
2853 if v & revlog.FLAG_GENERALDELTA:
2853 if v & revlog.FLAG_GENERALDELTA:
2854 gdelta = True
2854 gdelta = True
2855 flags.append(b'generaldelta')
2855 flags.append(b'generaldelta')
2856 if not flags:
2856 if not flags:
2857 flags = [b'(none)']
2857 flags = [b'(none)']
2858
2858
2859 ### tracks merge vs single parent
2859 ### tracks merge vs single parent
2860 nummerges = 0
2860 nummerges = 0
2861
2861
2862 ### tracks ways the "delta" are build
2862 ### tracks ways the "delta" are build
2863 # nodelta
2863 # nodelta
2864 numempty = 0
2864 numempty = 0
2865 numemptytext = 0
2865 numemptytext = 0
2866 numemptydelta = 0
2866 numemptydelta = 0
2867 # full file content
2867 # full file content
2868 numfull = 0
2868 numfull = 0
2869 # intermediate snapshot against a prior snapshot
2869 # intermediate snapshot against a prior snapshot
2870 numsemi = 0
2870 numsemi = 0
2871 # snapshot count per depth
2871 # snapshot count per depth
2872 numsnapdepth = collections.defaultdict(lambda: 0)
2872 numsnapdepth = collections.defaultdict(lambda: 0)
2873 # delta against previous revision
2873 # delta against previous revision
2874 numprev = 0
2874 numprev = 0
2875 # delta against first or second parent (not prev)
2875 # delta against first or second parent (not prev)
2876 nump1 = 0
2876 nump1 = 0
2877 nump2 = 0
2877 nump2 = 0
2878 # delta against neither prev nor parents
2878 # delta against neither prev nor parents
2879 numother = 0
2879 numother = 0
2880 # delta against prev that are also first or second parent
2880 # delta against prev that are also first or second parent
2881 # (details of `numprev`)
2881 # (details of `numprev`)
2882 nump1prev = 0
2882 nump1prev = 0
2883 nump2prev = 0
2883 nump2prev = 0
2884
2884
2885 # data about delta chain of each revs
2885 # data about delta chain of each revs
2886 chainlengths = []
2886 chainlengths = []
2887 chainbases = []
2887 chainbases = []
2888 chainspans = []
2888 chainspans = []
2889
2889
2890 # data about each revision
2890 # data about each revision
2891 datasize = [None, 0, 0]
2891 datasize = [None, 0, 0]
2892 fullsize = [None, 0, 0]
2892 fullsize = [None, 0, 0]
2893 semisize = [None, 0, 0]
2893 semisize = [None, 0, 0]
2894 # snapshot count per depth
2894 # snapshot count per depth
2895 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2895 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2896 deltasize = [None, 0, 0]
2896 deltasize = [None, 0, 0]
2897 chunktypecounts = {}
2897 chunktypecounts = {}
2898 chunktypesizes = {}
2898 chunktypesizes = {}
2899
2899
2900 def addsize(size, l):
2900 def addsize(size, l):
2901 if l[0] is None or size < l[0]:
2901 if l[0] is None or size < l[0]:
2902 l[0] = size
2902 l[0] = size
2903 if size > l[1]:
2903 if size > l[1]:
2904 l[1] = size
2904 l[1] = size
2905 l[2] += size
2905 l[2] += size
2906
2906
2907 numrevs = len(r)
2907 numrevs = len(r)
2908 for rev in pycompat.xrange(numrevs):
2908 for rev in pycompat.xrange(numrevs):
2909 p1, p2 = r.parentrevs(rev)
2909 p1, p2 = r.parentrevs(rev)
2910 delta = r.deltaparent(rev)
2910 delta = r.deltaparent(rev)
2911 if format > 0:
2911 if format > 0:
2912 addsize(r.rawsize(rev), datasize)
2912 addsize(r.rawsize(rev), datasize)
2913 if p2 != nullrev:
2913 if p2 != nullrev:
2914 nummerges += 1
2914 nummerges += 1
2915 size = r.length(rev)
2915 size = r.length(rev)
2916 if delta == nullrev:
2916 if delta == nullrev:
2917 chainlengths.append(0)
2917 chainlengths.append(0)
2918 chainbases.append(r.start(rev))
2918 chainbases.append(r.start(rev))
2919 chainspans.append(size)
2919 chainspans.append(size)
2920 if size == 0:
2920 if size == 0:
2921 numempty += 1
2921 numempty += 1
2922 numemptytext += 1
2922 numemptytext += 1
2923 else:
2923 else:
2924 numfull += 1
2924 numfull += 1
2925 numsnapdepth[0] += 1
2925 numsnapdepth[0] += 1
2926 addsize(size, fullsize)
2926 addsize(size, fullsize)
2927 addsize(size, snapsizedepth[0])
2927 addsize(size, snapsizedepth[0])
2928 else:
2928 else:
2929 chainlengths.append(chainlengths[delta] + 1)
2929 chainlengths.append(chainlengths[delta] + 1)
2930 baseaddr = chainbases[delta]
2930 baseaddr = chainbases[delta]
2931 revaddr = r.start(rev)
2931 revaddr = r.start(rev)
2932 chainbases.append(baseaddr)
2932 chainbases.append(baseaddr)
2933 chainspans.append((revaddr - baseaddr) + size)
2933 chainspans.append((revaddr - baseaddr) + size)
2934 if size == 0:
2934 if size == 0:
2935 numempty += 1
2935 numempty += 1
2936 numemptydelta += 1
2936 numemptydelta += 1
2937 elif r.issnapshot(rev):
2937 elif r.issnapshot(rev):
2938 addsize(size, semisize)
2938 addsize(size, semisize)
2939 numsemi += 1
2939 numsemi += 1
2940 depth = r.snapshotdepth(rev)
2940 depth = r.snapshotdepth(rev)
2941 numsnapdepth[depth] += 1
2941 numsnapdepth[depth] += 1
2942 addsize(size, snapsizedepth[depth])
2942 addsize(size, snapsizedepth[depth])
2943 else:
2943 else:
2944 addsize(size, deltasize)
2944 addsize(size, deltasize)
2945 if delta == rev - 1:
2945 if delta == rev - 1:
2946 numprev += 1
2946 numprev += 1
2947 if delta == p1:
2947 if delta == p1:
2948 nump1prev += 1
2948 nump1prev += 1
2949 elif delta == p2:
2949 elif delta == p2:
2950 nump2prev += 1
2950 nump2prev += 1
2951 elif delta == p1:
2951 elif delta == p1:
2952 nump1 += 1
2952 nump1 += 1
2953 elif delta == p2:
2953 elif delta == p2:
2954 nump2 += 1
2954 nump2 += 1
2955 elif delta != nullrev:
2955 elif delta != nullrev:
2956 numother += 1
2956 numother += 1
2957
2957
2958 # Obtain data on the raw chunks in the revlog.
2958 # Obtain data on the raw chunks in the revlog.
2959 if util.safehasattr(r, b'_getsegmentforrevs'):
2959 if util.safehasattr(r, b'_getsegmentforrevs'):
2960 segment = r._getsegmentforrevs(rev, rev)[1]
2960 segment = r._getsegmentforrevs(rev, rev)[1]
2961 else:
2961 else:
2962 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2962 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2963 if segment:
2963 if segment:
2964 chunktype = bytes(segment[0:1])
2964 chunktype = bytes(segment[0:1])
2965 else:
2965 else:
2966 chunktype = b'empty'
2966 chunktype = b'empty'
2967
2967
2968 if chunktype not in chunktypecounts:
2968 if chunktype not in chunktypecounts:
2969 chunktypecounts[chunktype] = 0
2969 chunktypecounts[chunktype] = 0
2970 chunktypesizes[chunktype] = 0
2970 chunktypesizes[chunktype] = 0
2971
2971
2972 chunktypecounts[chunktype] += 1
2972 chunktypecounts[chunktype] += 1
2973 chunktypesizes[chunktype] += size
2973 chunktypesizes[chunktype] += size
2974
2974
2975 # Adjust size min value for empty cases
2975 # Adjust size min value for empty cases
2976 for size in (datasize, fullsize, semisize, deltasize):
2976 for size in (datasize, fullsize, semisize, deltasize):
2977 if size[0] is None:
2977 if size[0] is None:
2978 size[0] = 0
2978 size[0] = 0
2979
2979
2980 numdeltas = numrevs - numfull - numempty - numsemi
2980 numdeltas = numrevs - numfull - numempty - numsemi
2981 numoprev = numprev - nump1prev - nump2prev
2981 numoprev = numprev - nump1prev - nump2prev
2982 totalrawsize = datasize[2]
2982 totalrawsize = datasize[2]
2983 datasize[2] /= numrevs
2983 datasize[2] /= numrevs
2984 fulltotal = fullsize[2]
2984 fulltotal = fullsize[2]
2985 if numfull == 0:
2985 if numfull == 0:
2986 fullsize[2] = 0
2986 fullsize[2] = 0
2987 else:
2987 else:
2988 fullsize[2] /= numfull
2988 fullsize[2] /= numfull
2989 semitotal = semisize[2]
2989 semitotal = semisize[2]
2990 snaptotal = {}
2990 snaptotal = {}
2991 if numsemi > 0:
2991 if numsemi > 0:
2992 semisize[2] /= numsemi
2992 semisize[2] /= numsemi
2993 for depth in snapsizedepth:
2993 for depth in snapsizedepth:
2994 snaptotal[depth] = snapsizedepth[depth][2]
2994 snaptotal[depth] = snapsizedepth[depth][2]
2995 snapsizedepth[depth][2] /= numsnapdepth[depth]
2995 snapsizedepth[depth][2] /= numsnapdepth[depth]
2996
2996
2997 deltatotal = deltasize[2]
2997 deltatotal = deltasize[2]
2998 if numdeltas > 0:
2998 if numdeltas > 0:
2999 deltasize[2] /= numdeltas
2999 deltasize[2] /= numdeltas
3000 totalsize = fulltotal + semitotal + deltatotal
3000 totalsize = fulltotal + semitotal + deltatotal
3001 avgchainlen = sum(chainlengths) / numrevs
3001 avgchainlen = sum(chainlengths) / numrevs
3002 maxchainlen = max(chainlengths)
3002 maxchainlen = max(chainlengths)
3003 maxchainspan = max(chainspans)
3003 maxchainspan = max(chainspans)
3004 compratio = 1
3004 compratio = 1
3005 if totalsize:
3005 if totalsize:
3006 compratio = totalrawsize / totalsize
3006 compratio = totalrawsize / totalsize
3007
3007
3008 basedfmtstr = b'%%%dd\n'
3008 basedfmtstr = b'%%%dd\n'
3009 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3009 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3010
3010
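# The helpers below build right-aligned numeric format strings sized to the
# widest value that will be printed: e.g. dfmtstr(12345) yields b'%5d\n',
# while pcfmtstr() additionally appends a percentage column.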
3011 def dfmtstr(max):
3011 def dfmtstr(max):
3012 return basedfmtstr % len(str(max))
3012 return basedfmtstr % len(str(max))
3013
3013
3014 def pcfmtstr(max, padding=0):
3014 def pcfmtstr(max, padding=0):
3015 return basepcfmtstr % (len(str(max)), b' ' * padding)
3015 return basepcfmtstr % (len(str(max)), b' ' * padding)
3016
3016
3017 def pcfmt(value, total):
3017 def pcfmt(value, total):
3018 if total:
3018 if total:
3019 return (value, 100 * float(value) / total)
3019 return (value, 100 * float(value) / total)
3020 else:
3020 else:
3021 return value, 100.0
3021 return value, 100.0
3022
3022
3023 ui.writenoi18n(b'format : %d\n' % format)
3023 ui.writenoi18n(b'format : %d\n' % format)
3024 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3024 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3025
3025
3026 ui.write(b'\n')
3026 ui.write(b'\n')
3027 fmt = pcfmtstr(totalsize)
3027 fmt = pcfmtstr(totalsize)
3028 fmt2 = dfmtstr(totalsize)
3028 fmt2 = dfmtstr(totalsize)
3029 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3029 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3030 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3030 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3031 ui.writenoi18n(
3031 ui.writenoi18n(
3032 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3032 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3033 )
3033 )
3034 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3034 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3035 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3035 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3036 ui.writenoi18n(
3036 ui.writenoi18n(
3037 b' text : '
3037 b' text : '
3038 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3038 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3039 )
3039 )
3040 ui.writenoi18n(
3040 ui.writenoi18n(
3041 b' delta : '
3041 b' delta : '
3042 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3042 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3043 )
3043 )
3044 ui.writenoi18n(
3044 ui.writenoi18n(
3045 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3045 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3046 )
3046 )
3047 for depth in sorted(numsnapdepth):
3047 for depth in sorted(numsnapdepth):
3048 ui.write(
3048 ui.write(
3049 (b' lvl-%-3d : ' % depth)
3049 (b' lvl-%-3d : ' % depth)
3050 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3050 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3051 )
3051 )
3052 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3052 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3053 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3053 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3054 ui.writenoi18n(
3054 ui.writenoi18n(
3055 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3055 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3056 )
3056 )
3057 for depth in sorted(numsnapdepth):
3057 for depth in sorted(numsnapdepth):
3058 ui.write(
3058 ui.write(
3059 (b' lvl-%-3d : ' % depth)
3059 (b' lvl-%-3d : ' % depth)
3060 + fmt % pcfmt(snaptotal[depth], totalsize)
3060 + fmt % pcfmt(snaptotal[depth], totalsize)
3061 )
3061 )
3062 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3062 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3063
3063
3064 def fmtchunktype(chunktype):
3064 def fmtchunktype(chunktype):
3065 if chunktype == b'empty':
3065 if chunktype == b'empty':
3066 return b' %s : ' % chunktype
3066 return b' %s : ' % chunktype
3067 elif chunktype in pycompat.bytestr(string.ascii_letters):
3067 elif chunktype in pycompat.bytestr(string.ascii_letters):
3068 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3068 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3069 else:
3069 else:
3070 return b' 0x%s : ' % hex(chunktype)
3070 return b' 0x%s : ' % hex(chunktype)
3071
3071
3072 ui.write(b'\n')
3072 ui.write(b'\n')
3073 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3073 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3074 for chunktype in sorted(chunktypecounts):
3074 for chunktype in sorted(chunktypecounts):
3075 ui.write(fmtchunktype(chunktype))
3075 ui.write(fmtchunktype(chunktype))
3076 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3076 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3077 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3077 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3078 for chunktype in sorted(chunktypecounts):
3078 for chunktype in sorted(chunktypecounts):
3079 ui.write(fmtchunktype(chunktype))
3079 ui.write(fmtchunktype(chunktype))
3080 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3080 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3081
3081
3082 ui.write(b'\n')
3082 ui.write(b'\n')
3083 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3083 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3084 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3084 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3085 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3085 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3086 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3086 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3087 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3087 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3088
3088
3089 if format > 0:
3089 if format > 0:
3090 ui.write(b'\n')
3090 ui.write(b'\n')
3091 ui.writenoi18n(
3091 ui.writenoi18n(
3092 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3092 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3093 % tuple(datasize)
3093 % tuple(datasize)
3094 )
3094 )
3095 ui.writenoi18n(
3095 ui.writenoi18n(
3096 b'full revision size (min/max/avg) : %d / %d / %d\n'
3096 b'full revision size (min/max/avg) : %d / %d / %d\n'
3097 % tuple(fullsize)
3097 % tuple(fullsize)
3098 )
3098 )
3099 ui.writenoi18n(
3099 ui.writenoi18n(
3100 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3100 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3101 % tuple(semisize)
3101 % tuple(semisize)
3102 )
3102 )
3103 for depth in sorted(snapsizedepth):
3103 for depth in sorted(snapsizedepth):
3104 if depth == 0:
3104 if depth == 0:
3105 continue
3105 continue
3106 ui.writenoi18n(
3106 ui.writenoi18n(
3107 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3107 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3108 % ((depth,) + tuple(snapsizedepth[depth]))
3108 % ((depth,) + tuple(snapsizedepth[depth]))
3109 )
3109 )
3110 ui.writenoi18n(
3110 ui.writenoi18n(
3111 b'delta size (min/max/avg) : %d / %d / %d\n'
3111 b'delta size (min/max/avg) : %d / %d / %d\n'
3112 % tuple(deltasize)
3112 % tuple(deltasize)
3113 )
3113 )
3114
3114
3115 if numdeltas > 0:
3115 if numdeltas > 0:
3116 ui.write(b'\n')
3116 ui.write(b'\n')
3117 fmt = pcfmtstr(numdeltas)
3117 fmt = pcfmtstr(numdeltas)
3118 fmt2 = pcfmtstr(numdeltas, 4)
3118 fmt2 = pcfmtstr(numdeltas, 4)
3119 ui.writenoi18n(
3119 ui.writenoi18n(
3120 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3120 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3121 )
3121 )
3122 if numprev > 0:
3122 if numprev > 0:
3123 ui.writenoi18n(
3123 ui.writenoi18n(
3124 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3124 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3125 )
3125 )
3126 ui.writenoi18n(
3126 ui.writenoi18n(
3127 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3127 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3128 )
3128 )
3129 ui.writenoi18n(
3129 ui.writenoi18n(
3130 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3130 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3131 )
3131 )
3132 if gdelta:
3132 if gdelta:
3133 ui.writenoi18n(
3133 ui.writenoi18n(
3134 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3134 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3135 )
3135 )
3136 ui.writenoi18n(
3136 ui.writenoi18n(
3137 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3137 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3138 )
3138 )
3139 ui.writenoi18n(
3139 ui.writenoi18n(
3140 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3140 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3141 )
3141 )
3142
3142
3143
3143
3144 @command(
3144 @command(
3145 b'debugrevlogindex',
3145 b'debugrevlogindex',
3146 cmdutil.debugrevlogopts
3146 cmdutil.debugrevlogopts
3147 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3147 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3148 _(b'[-f FORMAT] -c|-m|FILE'),
3148 _(b'[-f FORMAT] -c|-m|FILE'),
3149 optionalrepo=True,
3149 optionalrepo=True,
3150 )
3150 )
3151 def debugrevlogindex(ui, repo, file_=None, **opts):
3151 def debugrevlogindex(ui, repo, file_=None, **opts):
3152 """dump the contents of a revlog index"""
3152 """dump the contents of a revlog index"""
3153 opts = pycompat.byteskwargs(opts)
3153 opts = pycompat.byteskwargs(opts)
3154 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3154 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3155 format = opts.get(b'format', 0)
3155 format = opts.get(b'format', 0)
3156 if format not in (0, 1):
3156 if format not in (0, 1):
3157 raise error.Abort(_(b"unknown format %d") % format)
3157 raise error.Abort(_(b"unknown format %d") % format)
3158
3158
3159 if ui.debugflag:
3159 if ui.debugflag:
3160 shortfn = hex
3160 shortfn = hex
3161 else:
3161 else:
3162 shortfn = short
3162 shortfn = short
3163
3163
3164 # There might not be anything in r, so have a sane default
3164 # There might not be anything in r, so have a sane default
3165 idlen = 12
3165 idlen = 12
3166 for i in r:
3166 for i in r:
3167 idlen = len(shortfn(r.node(i)))
3167 idlen = len(shortfn(r.node(i)))
3168 break
3168 break
3169
3169
3170 if format == 0:
3170 if format == 0:
3171 if ui.verbose:
3171 if ui.verbose:
3172 ui.writenoi18n(
3172 ui.writenoi18n(
3173 b" rev offset length linkrev %s %s p2\n"
3173 b" rev offset length linkrev %s %s p2\n"
3174 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3174 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3175 )
3175 )
3176 else:
3176 else:
3177 ui.writenoi18n(
3177 ui.writenoi18n(
3178 b" rev linkrev %s %s p2\n"
3178 b" rev linkrev %s %s p2\n"
3179 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3179 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3180 )
3180 )
3181 elif format == 1:
3181 elif format == 1:
3182 if ui.verbose:
3182 if ui.verbose:
3183 ui.writenoi18n(
3183 ui.writenoi18n(
3184 (
3184 (
3185 b" rev flag offset length size link p1"
3185 b" rev flag offset length size link p1"
3186 b" p2 %s\n"
3186 b" p2 %s\n"
3187 )
3187 )
3188 % b"nodeid".rjust(idlen)
3188 % b"nodeid".rjust(idlen)
3189 )
3189 )
3190 else:
3190 else:
3191 ui.writenoi18n(
3191 ui.writenoi18n(
3192 b" rev flag size link p1 p2 %s\n"
3192 b" rev flag size link p1 p2 %s\n"
3193 % b"nodeid".rjust(idlen)
3193 % b"nodeid".rjust(idlen)
3194 )
3194 )
3195
3195
3196 for i in r:
3196 for i in r:
3197 node = r.node(i)
3197 node = r.node(i)
3198 if format == 0:
3198 if format == 0:
3199 try:
3199 try:
3200 pp = r.parents(node)
3200 pp = r.parents(node)
3201 except Exception:
3201 except Exception:
3202 pp = [nullid, nullid]
3202 pp = [nullid, nullid]
3203 if ui.verbose:
3203 if ui.verbose:
3204 ui.write(
3204 ui.write(
3205 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3205 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3206 % (
3206 % (
3207 i,
3207 i,
3208 r.start(i),
3208 r.start(i),
3209 r.length(i),
3209 r.length(i),
3210 r.linkrev(i),
3210 r.linkrev(i),
3211 shortfn(node),
3211 shortfn(node),
3212 shortfn(pp[0]),
3212 shortfn(pp[0]),
3213 shortfn(pp[1]),
3213 shortfn(pp[1]),
3214 )
3214 )
3215 )
3215 )
3216 else:
3216 else:
3217 ui.write(
3217 ui.write(
3218 b"% 6d % 7d %s %s %s\n"
3218 b"% 6d % 7d %s %s %s\n"
3219 % (
3219 % (
3220 i,
3220 i,
3221 r.linkrev(i),
3221 r.linkrev(i),
3222 shortfn(node),
3222 shortfn(node),
3223 shortfn(pp[0]),
3223 shortfn(pp[0]),
3224 shortfn(pp[1]),
3224 shortfn(pp[1]),
3225 )
3225 )
3226 )
3226 )
3227 elif format == 1:
3227 elif format == 1:
3228 pr = r.parentrevs(i)
3228 pr = r.parentrevs(i)
3229 if ui.verbose:
3229 if ui.verbose:
3230 ui.write(
3230 ui.write(
3231 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3231 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3232 % (
3232 % (
3233 i,
3233 i,
3234 r.flags(i),
3234 r.flags(i),
3235 r.start(i),
3235 r.start(i),
3236 r.length(i),
3236 r.length(i),
3237 r.rawsize(i),
3237 r.rawsize(i),
3238 r.linkrev(i),
3238 r.linkrev(i),
3239 pr[0],
3239 pr[0],
3240 pr[1],
3240 pr[1],
3241 shortfn(node),
3241 shortfn(node),
3242 )
3242 )
3243 )
3243 )
3244 else:
3244 else:
3245 ui.write(
3245 ui.write(
3246 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3246 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3247 % (
3247 % (
3248 i,
3248 i,
3249 r.flags(i),
3249 r.flags(i),
3250 r.rawsize(i),
3250 r.rawsize(i),
3251 r.linkrev(i),
3251 r.linkrev(i),
3252 pr[0],
3252 pr[0],
3253 pr[1],
3253 pr[1],
3254 shortfn(node),
3254 shortfn(node),
3255 )
3255 )
3256 )
3256 )
3257
3257
3258
3258
3259 @command(
3259 @command(
3260 b'debugrevspec',
3260 b'debugrevspec',
3261 [
3261 [
3262 (
3262 (
3263 b'',
3263 b'',
3264 b'optimize',
3264 b'optimize',
3265 None,
3265 None,
3266 _(b'print parsed tree after optimizing (DEPRECATED)'),
3266 _(b'print parsed tree after optimizing (DEPRECATED)'),
3267 ),
3267 ),
3268 (
3268 (
3269 b'',
3269 b'',
3270 b'show-revs',
3270 b'show-revs',
3271 True,
3271 True,
3272 _(b'print list of result revisions (default)'),
3272 _(b'print list of result revisions (default)'),
3273 ),
3273 ),
3274 (
3274 (
3275 b's',
3275 b's',
3276 b'show-set',
3276 b'show-set',
3277 None,
3277 None,
3278 _(b'print internal representation of result set'),
3278 _(b'print internal representation of result set'),
3279 ),
3279 ),
3280 (
3280 (
3281 b'p',
3281 b'p',
3282 b'show-stage',
3282 b'show-stage',
3283 [],
3283 [],
3284 _(b'print parsed tree at the given stage'),
3284 _(b'print parsed tree at the given stage'),
3285 _(b'NAME'),
3285 _(b'NAME'),
3286 ),
3286 ),
3287 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3287 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3288 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3288 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3289 ],
3289 ],
3290 b'REVSPEC',
3290 b'REVSPEC',
3291 )
3291 )
3292 def debugrevspec(ui, repo, expr, **opts):
3292 def debugrevspec(ui, repo, expr, **opts):
3293 """parse and apply a revision specification
3293 """parse and apply a revision specification
3294
3294
3295 Use the -p/--show-stage option to print the parsed tree at the given stages.
3295 Use the -p/--show-stage option to print the parsed tree at the given stages.
3296 Use -p all to print the tree at every stage.
3296 Use -p all to print the tree at every stage.
3297
3297
3298 Use --no-show-revs option with -s or -p to print only the set
3298 Use --no-show-revs option with -s or -p to print only the set
3299 representation or the parsed tree respectively.
3299 representation or the parsed tree respectively.
3300
3300
3301 Use --verify-optimized to compare the optimized result with the unoptimized
3301 Use --verify-optimized to compare the optimized result with the unoptimized
3302 one. Returns 1 if the optimized result differs.
3302 one. Returns 1 if the optimized result differs.
3303 """
3303 """
3304 opts = pycompat.byteskwargs(opts)
3304 opts = pycompat.byteskwargs(opts)
3305 aliases = ui.configitems(b'revsetalias')
3305 aliases = ui.configitems(b'revsetalias')
3306 stages = [
3306 stages = [
3307 (b'parsed', lambda tree: tree),
3307 (b'parsed', lambda tree: tree),
3308 (
3308 (
3309 b'expanded',
3309 b'expanded',
3310 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3310 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3311 ),
3311 ),
3312 (b'concatenated', revsetlang.foldconcat),
3312 (b'concatenated', revsetlang.foldconcat),
3313 (b'analyzed', revsetlang.analyze),
3313 (b'analyzed', revsetlang.analyze),
3314 (b'optimized', revsetlang.optimize),
3314 (b'optimized', revsetlang.optimize),
3315 ]
3315 ]
3316 if opts[b'no_optimized']:
3316 if opts[b'no_optimized']:
3317 stages = stages[:-1]
3317 stages = stages[:-1]
3318 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3318 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3319 raise error.Abort(
3319 raise error.Abort(
3320 _(b'cannot use --verify-optimized with --no-optimized')
3320 _(b'cannot use --verify-optimized with --no-optimized')
3321 )
3321 )
3322 stagenames = {n for n, f in stages}
3322 stagenames = {n for n, f in stages}
3323
3323
3324 showalways = set()
3324 showalways = set()
3325 showchanged = set()
3325 showchanged = set()
3326 if ui.verbose and not opts[b'show_stage']:
3326 if ui.verbose and not opts[b'show_stage']:
3327 # show parsed tree by --verbose (deprecated)
3327 # show parsed tree by --verbose (deprecated)
3328 showalways.add(b'parsed')
3328 showalways.add(b'parsed')
3329 showchanged.update([b'expanded', b'concatenated'])
3329 showchanged.update([b'expanded', b'concatenated'])
3330 if opts[b'optimize']:
3330 if opts[b'optimize']:
3331 showalways.add(b'optimized')
3331 showalways.add(b'optimized')
3332 if opts[b'show_stage'] and opts[b'optimize']:
3332 if opts[b'show_stage'] and opts[b'optimize']:
3333 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3333 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3334 if opts[b'show_stage'] == [b'all']:
3334 if opts[b'show_stage'] == [b'all']:
3335 showalways.update(stagenames)
3335 showalways.update(stagenames)
3336 else:
3336 else:
3337 for n in opts[b'show_stage']:
3337 for n in opts[b'show_stage']:
3338 if n not in stagenames:
3338 if n not in stagenames:
3339 raise error.Abort(_(b'invalid stage name: %s') % n)
3339 raise error.Abort(_(b'invalid stage name: %s') % n)
3340 showalways.update(opts[b'show_stage'])
3340 showalways.update(opts[b'show_stage'])
3341
3341
3342 treebystage = {}
3342 treebystage = {}
3343 printedtree = None
3343 printedtree = None
3344 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3344 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3345 for n, f in stages:
3345 for n, f in stages:
3346 treebystage[n] = tree = f(tree)
3346 treebystage[n] = tree = f(tree)
3347 if n in showalways or (n in showchanged and tree != printedtree):
3347 if n in showalways or (n in showchanged and tree != printedtree):
3348 if opts[b'show_stage'] or n != b'parsed':
3348 if opts[b'show_stage'] or n != b'parsed':
3349 ui.write(b"* %s:\n" % n)
3349 ui.write(b"* %s:\n" % n)
3350 ui.write(revsetlang.prettyformat(tree), b"\n")
3350 ui.write(revsetlang.prettyformat(tree), b"\n")
3351 printedtree = tree
3351 printedtree = tree
3352
3352
3353 if opts[b'verify_optimized']:
3353 if opts[b'verify_optimized']:
3354 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3354 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3355 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3355 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3356 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3356 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3357 ui.writenoi18n(
3357 ui.writenoi18n(
3358 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3358 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3359 )
3359 )
3360 ui.writenoi18n(
3360 ui.writenoi18n(
3361 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3361 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3362 )
3362 )
3363 arevs = list(arevs)
3363 arevs = list(arevs)
3364 brevs = list(brevs)
3364 brevs = list(brevs)
3365 if arevs == brevs:
3365 if arevs == brevs:
3366 return 0
3366 return 0
3367 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3367 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3368 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3368 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3369 sm = difflib.SequenceMatcher(None, arevs, brevs)
3369 sm = difflib.SequenceMatcher(None, arevs, brevs)
3370 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3370 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3371 if tag in ('delete', 'replace'):
3371 if tag in ('delete', 'replace'):
3372 for c in arevs[alo:ahi]:
3372 for c in arevs[alo:ahi]:
3373 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3373 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3374 if tag in ('insert', 'replace'):
3374 if tag in ('insert', 'replace'):
3375 for c in brevs[blo:bhi]:
3375 for c in brevs[blo:bhi]:
3376 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3376 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3377 if tag == 'equal':
3377 if tag == 'equal':
3378 for c in arevs[alo:ahi]:
3378 for c in arevs[alo:ahi]:
3379 ui.write(b' %d\n' % c)
3379 ui.write(b' %d\n' % c)
3380 return 1
3380 return 1
3381
3381
3382 func = revset.makematcher(tree)
3382 func = revset.makematcher(tree)
3383 revs = func(repo)
3383 revs = func(repo)
3384 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3384 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3385 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3385 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3386 if not opts[b'show_revs']:
3386 if not opts[b'show_revs']:
3387 return
3387 return
3388 for c in revs:
3388 for c in revs:
3389 ui.write(b"%d\n" % c)
3389 ui.write(b"%d\n" % c)
3390
3390
3391
3391
3392 @command(
3392 @command(
3393 b'debugserve',
3393 b'debugserve',
3394 [
3394 [
3395 (
3395 (
3396 b'',
3396 b'',
3397 b'sshstdio',
3397 b'sshstdio',
3398 False,
3398 False,
3399 _(b'run an SSH server bound to process handles'),
3399 _(b'run an SSH server bound to process handles'),
3400 ),
3400 ),
3401 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3401 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3402 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3402 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3403 ],
3403 ],
3404 b'',
3404 b'',
3405 )
3405 )
3406 def debugserve(ui, repo, **opts):
3406 def debugserve(ui, repo, **opts):
3407 """run a server with advanced settings
3407 """run a server with advanced settings
3408
3408
3409 This command is similar to :hg:`serve`. It exists partially as a
3409 This command is similar to :hg:`serve`. It exists partially as a
3410 workaround for the fact that ``hg serve --stdio`` must have specific
3410 workaround for the fact that ``hg serve --stdio`` must have specific
3411 arguments for security reasons.
3411 arguments for security reasons.
3412 """
3412 """
3413 opts = pycompat.byteskwargs(opts)
3413 opts = pycompat.byteskwargs(opts)
3414
3414
3415 if not opts[b'sshstdio']:
3415 if not opts[b'sshstdio']:
3416 raise error.Abort(_(b'only --sshstdio is currently supported'))
3416 raise error.Abort(_(b'only --sshstdio is currently supported'))
3417
3417
3418 logfh = None
3418 logfh = None
3419
3419
3420 if opts[b'logiofd'] and opts[b'logiofile']:
3420 if opts[b'logiofd'] and opts[b'logiofile']:
3421 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3421 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3422
3422
3423 if opts[b'logiofd']:
3423 if opts[b'logiofd']:
3424 # Ideally we would be line buffered. But line buffering in binary
3424 # Ideally we would be line buffered. But line buffering in binary
3425 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3425 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3426 # buffering could have performance impacts. But since this isn't
3426 # buffering could have performance impacts. But since this isn't
3427 # performance critical code, it should be fine.
3427 # performance critical code, it should be fine.
3428 try:
3428 try:
3429 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3429 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3430 except OSError as e:
3430 except OSError as e:
3431 if e.errno != errno.ESPIPE:
3431 if e.errno != errno.ESPIPE:
3432 raise
3432 raise
3433 # can't seek a pipe, so `ab` mode fails on py3
3433 # can't seek a pipe, so `ab` mode fails on py3
3434 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3434 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3435 elif opts[b'logiofile']:
3435 elif opts[b'logiofile']:
3436 logfh = open(opts[b'logiofile'], b'ab', 0)
3436 logfh = open(opts[b'logiofile'], b'ab', 0)
3437
3437
3438 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3438 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3439 s.serve_forever()
3439 s.serve_forever()
3440
3440
3441
3441
3442 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3442 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3443 def debugsetparents(ui, repo, rev1, rev2=None):
3443 def debugsetparents(ui, repo, rev1, rev2=None):
3444 """manually set the parents of the current working directory (DANGEROUS)
3444 """manually set the parents of the current working directory (DANGEROUS)
3445
3445
3446 This command is not what you are looking for and should not be used. Using
3446 This command is not what you are looking for and should not be used. Using
3447 this command will most certainly result in slight corruption of the file
3447 this command will most certainly result in slight corruption of the file
3448 level histories within your repository. DO NOT USE THIS COMMAND.
3448 level histories within your repository. DO NOT USE THIS COMMAND.
3449
3449
3450 The command updates the p1 and p2 fields in the dirstate, and does not touch
3450 The command updates the p1 and p2 fields in the dirstate, and does not touch
3451 anything else. This is useful for writing repository conversion tools, but
3451 anything else. This is useful for writing repository conversion tools, but
3452 should be used with extreme care. For example, neither the working
3452 should be used with extreme care. For example, neither the working
3453 directory nor the dirstate is updated, so file status may be incorrect
3453 directory nor the dirstate is updated, so file status may be incorrect
3454 after running this command. Only use it if you are one of the few people who
3454 after running this command. Only use it if you are one of the few people who
3455 deeply understand both conversion tools and file level histories. If you are
3455 deeply understand both conversion tools and file level histories. If you are
3456 reading this help, you are not one of those people (most of them sailed west
3456 reading this help, you are not one of those people (most of them sailed west
3457 from Mithlond anyway).
3457 from Mithlond anyway).
3458
3458
3459 So one last time DO NOT USE THIS COMMAND.
3459 So one last time DO NOT USE THIS COMMAND.
3460
3460
3461 Returns 0 on success.
3461 Returns 0 on success.
3462 """
3462 """
3463
3463
3464 node1 = scmutil.revsingle(repo, rev1).node()
3464 node1 = scmutil.revsingle(repo, rev1).node()
3465 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3465 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3466
3466
3467 with repo.wlock():
3467 with repo.wlock():
3468 repo.setparents(node1, node2)
3468 repo.setparents(node1, node2)
3469
3469
3470
3470
3471 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3471 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3472 def debugsidedata(ui, repo, file_, rev=None, **opts):
3472 def debugsidedata(ui, repo, file_, rev=None, **opts):
3473 """dump the side data for a cl/manifest/file revision
3473 """dump the side data for a cl/manifest/file revision
3474
3474
3475 Use --verbose to dump the sidedata content."""
3475 Use --verbose to dump the sidedata content."""
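# Hypothetical example: dumping the sidedata of changelog revision 0 (the -c
# flag comes from the shared debugrevlogopts table):
#
#   $ hg --verbose debugsidedata -c 0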
3476 opts = pycompat.byteskwargs(opts)
3476 opts = pycompat.byteskwargs(opts)
3477 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3477 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3478 if rev is not None:
3478 if rev is not None:
3479 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3479 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3480 file_, rev = None, file_
3480 file_, rev = None, file_
3481 elif rev is None:
3481 elif rev is None:
3482 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3482 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3483 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3483 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3484 r = getattr(r, '_revlog', r)
3484 r = getattr(r, '_revlog', r)
3485 try:
3485 try:
3486 sidedata = r.sidedata(r.lookup(rev))
3486 sidedata = r.sidedata(r.lookup(rev))
3487 except KeyError:
3487 except KeyError:
3488 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3488 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3489 if sidedata:
3489 if sidedata:
3490 sidedata = list(sidedata.items())
3490 sidedata = list(sidedata.items())
3491 sidedata.sort()
3491 sidedata.sort()
3492 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3492 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3493 for key, value in sidedata:
3493 for key, value in sidedata:
3494 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3494 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3495 if ui.verbose:
3495 if ui.verbose:
3496 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3496 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3497
3497
3498
3498
3499 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3499 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3500 def debugssl(ui, repo, source=None, **opts):
3500 def debugssl(ui, repo, source=None, **opts):
3501 """test a secure connection to a server
3501 """test a secure connection to a server
3502
3502
3503 This builds the certificate chain for the server on Windows, installing the
3503 This builds the certificate chain for the server on Windows, installing the
3504 missing intermediates and trusted root via Windows Update if necessary. It
3504 missing intermediates and trusted root via Windows Update if necessary. It
3505 does nothing on other platforms.
3505 does nothing on other platforms.
3506
3506
3507 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3507 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3508 that server is used. See :hg:`help urls` for more information.
3508 that server is used. See :hg:`help urls` for more information.
3509
3509
3510 If the update succeeds, retry the original operation. Otherwise, the cause
3510 If the update succeeds, retry the original operation. Otherwise, the cause
3511 of the SSL error is likely another issue.
3511 of the SSL error is likely another issue.
3512 """
3512 """
3513 if not pycompat.iswindows:
3513 if not pycompat.iswindows:
3514 raise error.Abort(
3514 raise error.Abort(
3515 _(b'certificate chain building is only possible on Windows')
3515 _(b'certificate chain building is only possible on Windows')
3516 )
3516 )
3517
3517
3518 if not source:
3518 if not source:
3519 if not repo:
3519 if not repo:
3520 raise error.Abort(
3520 raise error.Abort(
3521 _(
3521 _(
3522 b"there is no Mercurial repository here, and no "
3522 b"there is no Mercurial repository here, and no "
3523 b"server specified"
3523 b"server specified"
3524 )
3524 )
3525 )
3525 )
3526 source = b"default"
3526 source = b"default"
3527
3527
3528 source, branches = hg.parseurl(ui.expandpath(source))
3528 source, branches = hg.parseurl(ui.expandpath(source))
3529 url = util.url(source)
3529 url = util.url(source)
3530
3530
3531 defaultport = {b'https': 443, b'ssh': 22}
3531 defaultport = {b'https': 443, b'ssh': 22}
3532 if url.scheme in defaultport:
3532 if url.scheme in defaultport:
3533 try:
3533 try:
3534 addr = (url.host, int(url.port or defaultport[url.scheme]))
3534 addr = (url.host, int(url.port or defaultport[url.scheme]))
3535 except ValueError:
3535 except ValueError:
3536 raise error.Abort(_(b"malformed port number in URL"))
3536 raise error.Abort(_(b"malformed port number in URL"))
3537 else:
3537 else:
3538 raise error.Abort(_(b"only https and ssh connections are supported"))
3538 raise error.Abort(_(b"only https and ssh connections are supported"))
3539
3539
3540 from . import win32
3540 from . import win32
3541
3541
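# Certificate verification is deliberately disabled here (CERT_NONE): the goal
# is only to fetch the peer certificate so that the Windows API below can
# inspect and, if needed, rebuild its chain.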
3542 s = ssl.wrap_socket(
3542 s = ssl.wrap_socket(
3543 socket.socket(),
3543 socket.socket(),
3544 ssl_version=ssl.PROTOCOL_TLS,
3544 ssl_version=ssl.PROTOCOL_TLS,
3545 cert_reqs=ssl.CERT_NONE,
3545 cert_reqs=ssl.CERT_NONE,
3546 ca_certs=None,
3546 ca_certs=None,
3547 )
3547 )
3548
3548
3549 try:
3549 try:
3550 s.connect(addr)
3550 s.connect(addr)
3551 cert = s.getpeercert(True)
3551 cert = s.getpeercert(True)
3552
3552
3553 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3553 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3554
3554
3555 complete = win32.checkcertificatechain(cert, build=False)
3555 complete = win32.checkcertificatechain(cert, build=False)
3556
3556
3557 if not complete:
3557 if not complete:
3558 ui.status(_(b'certificate chain is incomplete, updating... '))
3558 ui.status(_(b'certificate chain is incomplete, updating... '))
3559
3559
3560 if not win32.checkcertificatechain(cert):
3560 if not win32.checkcertificatechain(cert):
3561 ui.status(_(b'failed.\n'))
3561 ui.status(_(b'failed.\n'))
3562 else:
3562 else:
3563 ui.status(_(b'done.\n'))
3563 ui.status(_(b'done.\n'))
3564 else:
3564 else:
3565 ui.status(_(b'full certificate chain is available\n'))
3565 ui.status(_(b'full certificate chain is available\n'))
3566 finally:
3566 finally:
3567 s.close()
3567 s.close()
3568
3568
3569
3569
3570 @command(
3570 @command(
3571 b"debugbackupbundle",
3571 b"debugbackupbundle",
3572 [
3572 [
3573 (
3573 (
3574 b"",
3574 b"",
3575 b"recover",
3575 b"recover",
3576 b"",
3576 b"",
3577 b"brings the specified changeset back into the repository",
3577 b"brings the specified changeset back into the repository",
3578 )
3578 )
3579 ]
3579 ]
3580 + cmdutil.logopts,
3580 + cmdutil.logopts,
3581 _(b"hg debugbackupbundle [--recover HASH]"),
3581 _(b"hg debugbackupbundle [--recover HASH]"),
3582 )
3582 )
3583 def debugbackupbundle(ui, repo, *pats, **opts):
3583 def debugbackupbundle(ui, repo, *pats, **opts):
3584 """lists the changesets available in backup bundles
3584 """lists the changesets available in backup bundles
3585
3585
3586 Without any arguments, this command prints a list of the changesets in each
3586 Without any arguments, this command prints a list of the changesets in each
3587 backup bundle.
3587 backup bundle.
3588
3588
3589 --recover takes a changeset hash and unbundles the first bundle that
3589 --recover takes a changeset hash and unbundles the first bundle that
3590 contains that hash, which puts that changeset back in your repository.
3590 contains that hash, which puts that changeset back in your repository.
3591
3591
3592 --verbose will print the entire commit message and the bundle path for that
3592 --verbose will print the entire commit message and the bundle path for that
3593 backup.
3593 backup.
3594 """
3594 """
3595 backups = list(
3595 backups = list(
3596 filter(
3596 filter(
3597 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3597 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3598 )
3598 )
3599 )
3599 )
3600 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3600 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3601
3601
3602 opts = pycompat.byteskwargs(opts)
3602 opts = pycompat.byteskwargs(opts)
3603 opts[b"bundle"] = b""
3603 opts[b"bundle"] = b""
3604 opts[b"force"] = None
3604 opts[b"force"] = None
3605 limit = logcmdutil.getlimit(opts)
3605 limit = logcmdutil.getlimit(opts)
3606
3606
3607 def display(other, chlist, displayer):
3607 def display(other, chlist, displayer):
3608 if opts.get(b"newest_first"):
3608 if opts.get(b"newest_first"):
3609 chlist.reverse()
3609 chlist.reverse()
3610 count = 0
3610 count = 0
3611 for n in chlist:
3611 for n in chlist:
3612 if limit is not None and count >= limit:
3612 if limit is not None and count >= limit:
3613 break
3613 break
3614 parents = [True for p in other.changelog.parents(n) if p != nullid]
3614 parents = [True for p in other.changelog.parents(n) if p != nullid]
3615 if opts.get(b"no_merges") and len(parents) == 2:
3615 if opts.get(b"no_merges") and len(parents) == 2:
3616 continue
3616 continue
3617 count += 1
3617 count += 1
3618 displayer.show(other[n])
3618 displayer.show(other[n])
3619
3619
3620 recovernode = opts.get(b"recover")
3620 recovernode = opts.get(b"recover")
3621 if recovernode:
3621 if recovernode:
3622 if scmutil.isrevsymbol(repo, recovernode):
3622 if scmutil.isrevsymbol(repo, recovernode):
3623 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3623 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3624 return
3624 return
3625 elif backups:
3625 elif backups:
3626 msg = _(
3626 msg = _(
3627 b"Recover changesets using: hg debugbackupbundle --recover "
3627 b"Recover changesets using: hg debugbackupbundle --recover "
3628 b"<changeset hash>\n\nAvailable backup changesets:"
3628 b"<changeset hash>\n\nAvailable backup changesets:"
3629 )
3629 )
3630 ui.status(msg, label=b"status.removed")
3630 ui.status(msg, label=b"status.removed")
3631 else:
3631 else:
3632 ui.status(_(b"no backup changesets found\n"))
3632 ui.status(_(b"no backup changesets found\n"))
3633 return
3633 return
3634
3634
3635 for backup in backups:
3635 for backup in backups:
3636 # Much of this is copied from the hg incoming logic
3636 # Much of this is copied from the hg incoming logic
3637 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3637 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3638 source, branches = hg.parseurl(source, opts.get(b"branch"))
3638 source, branches = hg.parseurl(source, opts.get(b"branch"))
3639 try:
3639 try:
3640 other = hg.peer(repo, opts, source)
3640 other = hg.peer(repo, opts, source)
3641 except error.LookupError as ex:
3641 except error.LookupError as ex:
3642 msg = _(b"\nwarning: unable to open bundle %s") % source
3642 msg = _(b"\nwarning: unable to open bundle %s") % source
3643 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3643 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3644 ui.warn(msg, hint=hint)
3644 ui.warn(msg, hint=hint)
3645 continue
3645 continue
3646 revs, checkout = hg.addbranchrevs(
3646 revs, checkout = hg.addbranchrevs(
3647 repo, other, branches, opts.get(b"rev")
3647 repo, other, branches, opts.get(b"rev")
3648 )
3648 )
3649
3649
3650 if revs:
3650 if revs:
3651 revs = [other.lookup(rev) for rev in revs]
3651 revs = [other.lookup(rev) for rev in revs]
3652
3652
3653 quiet = ui.quiet
3653 quiet = ui.quiet
3654 try:
3654 try:
3655 ui.quiet = True
3655 ui.quiet = True
3656 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3656 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3657 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3657 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3658 )
3658 )
3659 except error.LookupError:
3659 except error.LookupError:
3660 continue
3660 continue
3661 finally:
3661 finally:
3662 ui.quiet = quiet
3662 ui.quiet = quiet
3663
3663
3664 try:
3664 try:
3665 if not chlist:
3665 if not chlist:
3666 continue
3666 continue
3667 if recovernode:
3667 if recovernode:
3668 with repo.lock(), repo.transaction(b"unbundle") as tr:
3668 with repo.lock(), repo.transaction(b"unbundle") as tr:
3669 if scmutil.isrevsymbol(other, recovernode):
3669 if scmutil.isrevsymbol(other, recovernode):
3670 ui.status(_(b"Unbundling %s\n") % (recovernode))
3670 ui.status(_(b"Unbundling %s\n") % (recovernode))
3671 f = hg.openpath(ui, source)
3671 f = hg.openpath(ui, source)
3672 gen = exchange.readbundle(ui, f, source)
3672 gen = exchange.readbundle(ui, f, source)
3673 if isinstance(gen, bundle2.unbundle20):
3673 if isinstance(gen, bundle2.unbundle20):
3674 bundle2.applybundle(
3674 bundle2.applybundle(
3675 repo,
3675 repo,
3676 gen,
3676 gen,
3677 tr,
3677 tr,
3678 source=b"unbundle",
3678 source=b"unbundle",
3679 url=b"bundle:" + source,
3679 url=b"bundle:" + source,
3680 )
3680 )
3681 else:
3681 else:
3682 gen.apply(repo, b"unbundle", b"bundle:" + source)
3682 gen.apply(repo, b"unbundle", b"bundle:" + source)
3683 break
3683 break
3684 else:
3684 else:
3685 backupdate = encoding.strtolocal(
3685 backupdate = encoding.strtolocal(
3686 time.strftime(
3686 time.strftime(
3687 "%a %H:%M, %Y-%m-%d",
3687 "%a %H:%M, %Y-%m-%d",
3688 time.localtime(os.path.getmtime(source)),
3688 time.localtime(os.path.getmtime(source)),
3689 )
3689 )
3690 )
3690 )
3691 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3691 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3692 if ui.verbose:
3692 if ui.verbose:
3693 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3693 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3694 else:
3694 else:
3695 opts[
3695 opts[
3696 b"template"
3696 b"template"
3697 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3697 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3698 displayer = logcmdutil.changesetdisplayer(
3698 displayer = logcmdutil.changesetdisplayer(
3699 ui, other, opts, False
3699 ui, other, opts, False
3700 )
3700 )
3701 display(other, chlist, displayer)
3701 display(other, chlist, displayer)
3702 displayer.close()
3702 displayer.close()
3703 finally:
3703 finally:
3704 cleanupfn()
3704 cleanupfn()
3705
3705
3706
3706
3707 @command(
3707 @command(
3708 b'debugsub',
3708 b'debugsub',
3709 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3709 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3710 _(b'[-r REV] [REV]'),
3710 _(b'[-r REV] [REV]'),
3711 )
3711 )
3712 def debugsub(ui, repo, rev=None):
3712 def debugsub(ui, repo, rev=None):
3713 ctx = scmutil.revsingle(repo, rev, None)
3713 ctx = scmutil.revsingle(repo, rev, None)
3714 for k, v in sorted(ctx.substate.items()):
3714 for k, v in sorted(ctx.substate.items()):
3715 ui.writenoi18n(b'path %s\n' % k)
3715 ui.writenoi18n(b'path %s\n' % k)
3716 ui.writenoi18n(b' source %s\n' % v[0])
3716 ui.writenoi18n(b' source %s\n' % v[0])
3717 ui.writenoi18n(b' revision %s\n' % v[1])
3717 ui.writenoi18n(b' revision %s\n' % v[1])
3718
3718
3719
3719
3720 @command(b'debugshell', optionalrepo=True)
3720 @command(b'debugshell', optionalrepo=True)
3721 def debugshell(ui, repo):
3721 def debugshell(ui, repo):
3722 """run an interactive Python interpreter
3722 """run an interactive Python interpreter
3723
3723
3724 The local namespace is provided with a reference to the ui and
3724 The local namespace is provided with a reference to the ui and
3725 the repo instance (if available).
3725 the repo instance (if available).
3726 """
3726 """
3727 import code
3727 import code
3728
3728
3729 imported_objects = {
3729 imported_objects = {
3730 'ui': ui,
3730 'ui': ui,
3731 'repo': repo,
3731 'repo': repo,
3732 }
3732 }
3733
3733
3734 code.interact(local=imported_objects)
3734 code.interact(local=imported_objects)
3735
3735
3736
3736
3737 @command(
3737 @command(
3738 b'debugsuccessorssets',
3738 b'debugsuccessorssets',
3739 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3739 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3740 _(b'[REV]'),
3740 _(b'[REV]'),
3741 )
3741 )
3742 def debugsuccessorssets(ui, repo, *revs, **opts):
3742 def debugsuccessorssets(ui, repo, *revs, **opts):
3743 """show set of successors for revision
3743 """show set of successors for revision
3744
3744
3745 A successors set of changeset A is a consistent group of revisions that
3745 A successors set of changeset A is a consistent group of revisions that
3746 succeed A. It contains non-obsolete changesets only unless the closest
3746 succeed A. It contains non-obsolete changesets only unless the closest
3747 successors sets option is set.
3747 successors sets option is set.
3748
3748
3749 In most cases a changeset A has a single successors set containing a single
3749 In most cases a changeset A has a single successors set containing a single
3750 successor (changeset A replaced by A').
3750 successor (changeset A replaced by A').
3751
3751
3752 A changeset that is made obsolete with no successors is called "pruned".
3752 A changeset that is made obsolete with no successors is called "pruned".
3753 Such changesets have no successors sets at all.
3753 Such changesets have no successors sets at all.
3754
3754
3755 A changeset that has been "split" will have a successors set containing
3755 A changeset that has been "split" will have a successors set containing
3756 more than one successor.
3756 more than one successor.
3757
3757
3758 A changeset that has been rewritten in multiple different ways is called
3758 A changeset that has been rewritten in multiple different ways is called
3759 "divergent". Such changesets have multiple successor sets (each of which
3759 "divergent". Such changesets have multiple successor sets (each of which
3760 may also be split, i.e. have multiple successors).
3760 may also be split, i.e. have multiple successors).
3761
3761
3762 Results are displayed as follows::
3762 Results are displayed as follows::
3763
3763
3764 <rev1>
3764 <rev1>
3765 <successors-1A>
3765 <successors-1A>
3766 <rev2>
3766 <rev2>
3767 <successors-2A>
3767 <successors-2A>
3768 <successors-2B1> <successors-2B2> <successors-2B3>
3768 <successors-2B1> <successors-2B2> <successors-2B3>
3769
3769
3770 Here rev2 has two possible (i.e. divergent) successors sets. The first
3770 Here rev2 has two possible (i.e. divergent) successors sets. The first
3771 holds one element, whereas the second holds three (i.e. the changeset has
3771 holds one element, whereas the second holds three (i.e. the changeset has
3772 been split).
3772 been split).
3773 """
3773 """
3774 # passed to successorssets caching computation from one call to another
3774 # passed to successorssets caching computation from one call to another
3775 cache = {}
3775 cache = {}
3776 ctx2str = bytes
3776 ctx2str = bytes
3777 node2str = short
3777 node2str = short
3778 for rev in scmutil.revrange(repo, revs):
3778 for rev in scmutil.revrange(repo, revs):
3779 ctx = repo[rev]
3779 ctx = repo[rev]
3780 ui.write(b'%s\n' % ctx2str(ctx))
3780 ui.write(b'%s\n' % ctx2str(ctx))
3781 for succsset in obsutil.successorssets(
3781 for succsset in obsutil.successorssets(
3782 repo, ctx.node(), closest=opts['closest'], cache=cache
3782 repo, ctx.node(), closest=opts['closest'], cache=cache
3783 ):
3783 ):
3784 if succsset:
3784 if succsset:
3785 ui.write(b' ')
3785 ui.write(b' ')
3786 ui.write(node2str(succsset[0]))
3786 ui.write(node2str(succsset[0]))
3787 for node in succsset[1:]:
3787 for node in succsset[1:]:
3788 ui.write(b' ')
3788 ui.write(b' ')
3789 ui.write(node2str(node))
3789 ui.write(node2str(node))
3790 ui.write(b'\n')
3790 ui.write(b'\n')
3791
3791
3792
3792
3793 @command(b'debugtagscache', [])
3793 @command(b'debugtagscache', [])
3794 def debugtagscache(ui, repo):
3794 def debugtagscache(ui, repo):
3795 """display the contents of .hg/cache/hgtagsfnodes1"""
3795 """display the contents of .hg/cache/hgtagsfnodes1"""
3796 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3796 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3797 for r in repo:
3797 for r in repo:
3798 node = repo[r].node()
3798 node = repo[r].node()
3799 tagsnode = cache.getfnode(node, computemissing=False)
3799 tagsnode = cache.getfnode(node, computemissing=False)
3800 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3800 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3801 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3801 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3802
3802
3803
3803
3804 @command(
3804 @command(
3805 b'debugtemplate',
3805 b'debugtemplate',
3806 [
3806 [
3807 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()


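# Illustrative sketch, not part of the original module: a minimal generic
# (non-log) template rendering using the same helpers the `revs is None`
# branch of debugtemplate relies on above. The template string and keyword
# values are made-up examples.
def _exampledebugtemplate(ui, repo):
    tmpl = b'{greeting}, {name}!\n'
    # --verbose would also pretty-print the parse tree:
    tree = templater.parse(tmpl)
    ui.note(templater.prettyformat(tree), b'\n')
    # Build a templater with repository resources and render it with
    # user-defined keywords, mirroring `-D greeting=hello -D name=world`.
    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault({b'greeting': b'hello', b'name': b'world'})

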
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    if r is None:
        r = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % r)


@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % r)


@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)


@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines,
    this should complete almost instantaneously and the chances of a consumer
    being unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using
    flags such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlogs but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )


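# Illustrative sketch, not part of the original module and not the code path
# debugupgraderepo uses: a small model of how the tri-state --changelog /
# --manifest / --filelogs flags documented above select revlog groups. The
# real selection logic lives in the upgrade module.
def _examplerevlogselection(changelog=None, manifest=None, filelogs=None):
    flags = {
        b'changelog': changelog,
        b'manifest': manifest,
        b'filelogs': filelogs,
    }
    if any(v for v in flags.values()):
        # At least one group was explicitly requested: upgrade only those.
        return {name for name, value in flags.items() if value}
    # Otherwise start from every group and drop the explicitly excluded ones,
    # so `--no-changelog --no-manifest` leaves only the filelogs.
    return {name for name, value in flags.items() if value is not False}

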
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(repo.pathto(abs)) for abs in items]),
    )
    for abs in items:
        line = fmt % (
            abs,
            f(repo.pathto(abs)),
            m.exact(abs) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())


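# Illustrative sketch, not part of the original module: the same column
# layout debugwalk builds above, applied to two made-up paths, showing the
# shape of each output line (repo-relative path, cwd-relative path, then
# ``exact`` when the pattern matched the file literally).
def _examplewalkline():
    items = [b'a', b'dir/b']
    width = max(len(fn) for fn in items)
    fmt = b'f %%-%ds %%-%ds %%s' % (width, width)
    # e.g. [b'f a     a     exact', b'f dir/b dir/b exact']
    return [fmt % (fn, fn, b'exact') for fn in items]

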
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )


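# Illustrative note, not part of the original module: each line emitted by
# debugwhyunstable above has the shape
# ``<instability>: [<divergent node> (<phase>) ]<reason> <node>``, i.e. the
# kind of instability, the divergent counterparts (when any) with their
# phases, the reason, and the affected node.

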
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in pycompat.iteritems(opts):
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)


def _parsewirelangblocks(fh):
    activeaction = None
    blocklines = []
    lastindent = 0

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            lastindent = 0
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())

        # If this line is indented more than the last line, concatenate it.
        if indent > lastindent and blocklines:
            blocklines[-1] += line.lstrip()
        else:
            blocklines.append(line)
            lastindent = indent

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines


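# Illustrative sketch, not part of the original module: feeding the block
# parser above a tiny script shows how actions and their indented argument
# lines are grouped. ``io`` is imported locally just for this example.
def _exampleparseblocks():
    import io

    script = (
        b'# comments are ignored\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'readavailable\n'
    )
    # Returns [(b'command listkeys', [b'    namespace bookmarks']),
    #          (b'readavailable', [])]
    return list(_parsewirelangblocks(io.BytesIO(script)))

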
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
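

# Illustrative sketch, not part of the original module: a wire-language
# script of the kind debugwireproto reads from stdin. Piped into
# ``hg debugwireproto --localssh`` (or parsed with _parsewirelangblocks), it
# issues a ``listkeys`` command and then a batched pair of commands. The
# command names and namespaces are only examples.
_EXAMPLEWIREPROTOSCRIPT = (
    b'# the handshake happens automatically unless --peer raw is used\n'
    b'command listkeys\n'
    b'    namespace bookmarks\n'
    b'batchbegin\n'
    b'command heads\n'
    b'command listkeys\n'
    b'    namespace phases\n'
    b'batchsubmit\n'
    b'close\n'
)

# Likewise, a human-readable frame specification of the form documented in
# the docstring above (``<request-id> <stream-id> <stream-flags> <type>
# <flags> <payload>``), suitable for a ``frame ...`` line inside an
# ``httprequest`` block. The type and flag names are resolved against
# wireprotoframing.py; treat the exact values here as an example only.
_EXAMPLEFRAMESPEC = (
    b"1 1 stream-begin command-request new cbor:{b'name': b'heads'}"
)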