##// END OF EJS Templates
debugantivirusrunning: use bytes when opening a vfs file...
Matt Harbison -
r52832:8d9767bf default
parent child Browse files
Show More
@@ -1,4769 +1,4769
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import annotations
8 from __future__ import annotations
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filelog,
53 filelog,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 manifest,
62 manifest,
63 mergestate as mergestatemod,
63 mergestate as mergestatemod,
64 metadata,
64 metadata,
65 obsolete,
65 obsolete,
66 obsutil,
66 obsutil,
67 pathutil,
67 pathutil,
68 phases,
68 phases,
69 policy,
69 policy,
70 pvec,
70 pvec,
71 pycompat,
71 pycompat,
72 registrar,
72 registrar,
73 repair,
73 repair,
74 repoview,
74 repoview,
75 requirements,
75 requirements,
76 revlog,
76 revlog,
77 revset,
77 revset,
78 revsetlang,
78 revsetlang,
79 scmutil,
79 scmutil,
80 setdiscovery,
80 setdiscovery,
81 simplemerge,
81 simplemerge,
82 sshpeer,
82 sshpeer,
83 sslutil,
83 sslutil,
84 streamclone,
84 streamclone,
85 strip,
85 strip,
86 tags as tagsmod,
86 tags as tagsmod,
87 templater,
87 templater,
88 treediscovery,
88 treediscovery,
89 upgrade,
89 upgrade,
90 url as urlmod,
90 url as urlmod,
91 util,
91 util,
92 verify,
92 verify,
93 vfs as vfsmod,
93 vfs as vfsmod,
94 wireprotoframing,
94 wireprotoframing,
95 wireprotoserver,
95 wireprotoserver,
96 )
96 )
97 from .interfaces import repository
97 from .interfaces import repository
98 from .stabletailgraph import stabletailsort
98 from .stabletailgraph import stabletailsort
99 from .utils import (
99 from .utils import (
100 cborutil,
100 cborutil,
101 compression,
101 compression,
102 dateutil,
102 dateutil,
103 procutil,
103 procutil,
104 stringutil,
104 stringutil,
105 urlutil,
105 urlutil,
106 )
106 )
107
107
108 from .revlogutils import (
108 from .revlogutils import (
109 debug as revlog_debug,
109 debug as revlog_debug,
110 nodemap,
110 nodemap,
111 rewrite,
111 rewrite,
112 sidedata,
112 sidedata,
113 )
113 )
114
114
115 release = lockmod.release
115 release = lockmod.release
116
116
117 table = {}
117 table = {}
118 table.update(strip.command._table)
118 table.update(strip.command._table)
119 command = registrar.command(table)
119 command = registrar.command(table)
120
120
121
121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index

    With three arguments, the first is a revlog index file opened relative
    to the current working directory; with two arguments, the current
    repository's changelog is used instead.
    """
    if len(args) == 3:
        index, rev1, rev2 = args
        # open the standalone index without path auditing since it may live
        # anywhere relative to the cwd
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141
141
142
142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths must be bytes, not str (this was the bug fixed in r52832)
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158
158
159
159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file

    Reads the bundle at ``fname`` (local path or URL, via hg.openpath) and
    applies its revlog data directly to the current repository.
    """
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
166
166
167
167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually commit a changeset per 'n' element
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the file from both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # touch the lines belonging to this rev so every commit
                    # changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351
351
352
352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup ``gen``

    With ``all`` set, every delta of every chunk (changelog, manifest,
    filelogs) is printed; otherwise only the changelog node hashes are
    listed.  ``indent`` prefixes each line (used when nested in bundle2
    output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one delta-per-line summary for the named chunk group
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections repeat until an empty header dict is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392
392
393
393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report the unknown format instead of aborting the whole dump
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
415
415
416
416
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary phase data ``data``

    One line per head: ``<hex node> <phase name>``, prefixed by ``indent``
    spaces.
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425
425
426
426
def _quasirepr(thing):
    """return a bytes repr of ``thing`` with deterministic dict ordering

    Mapping types are rendered with their keys sorted so output is stable
    across runs; anything else falls back to the builtin repr.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))
433
433
434
434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional --part-type filter: show only the named part types
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457
457
458
458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only prints the bundlespec and stops
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481
481
482
482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # always release the peer connection, even on error
        peer.close()
501
501
502
502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the manifests instead of trusting stored sidedata
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file by the most specific action recorded
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
552
552
553
553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    errors = verify.verifier(repo)._verify_dirstate()
    if errors:
        errstr = _(b"dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
561
561
562
562
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)
575
575
576
576
def _debugdisplaycolor(ui):
    """print every color/effect name, each rendered in its own style"""
    # work on a copy so the caller's ui styles are untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
593
593
594
594
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # column width for aligning the effect lists
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
608
608
609
609
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    formatted = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % formatted)
635
635
636
636
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index file: open it relative to the cwd,
        # without path auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        # Revisions listed on the command line get an "rN" label below.
        revs = {int(r) for r in revs}

        # Lazily yield ('n', (rev, parents)) node events, plus
        # ('l', (rev, label)) label events for the requested revisions;
        # consumed by dagparser.dagtextlines() at the bottom.
        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        # Same event stream as above, but walking the changelog; with
        # --branches an ('a', branchname) annotation is emitted whenever
        # the branch changes, and with --tags an ('l', ...) label event
        # is emitted for every tag on the revision.
        def events():
            # Track the current branch so an annotation is only emitted
            # when it actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    # Field 5 of a changelog entry is the extras dict;
                    # b'branch' holds the branch name.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as concise dagtext, wrapped at 70 columns.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
706
706
707
707
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the single positional argument is the revision
    # itself, not a file.
    if any(opts.get(k) for k in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            msg = _(b'cannot specify a revision with other arguments')
            raise error.InputError(msg)
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    storage = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    try:
        # Raw (possibly delta-resolved but untransformed) revision data.
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726
726
727
727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (unixtime, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746
746
747
747
@command(
    b'debugdeltachain',
    [
        (
            b'r',
            b'rev',
            [],
            _('restrict processing to these revlog revisions'),
        ),
        (
            b'',
            b'all-info',
            False,
            _('compute all information unless specified otherwise'),
        ),
        (
            b'',
            b'size-info',
            None,
            _('compute information related to deltas size'),
        ),
        (
            b'',
            b'dist-info',
            None,
            _('compute information related to base distance'),
        ),
        (
            b'',
            b'sparse-info',
            None,
            _('compute information related to sparse read'),
        ),
    ]
    + cmdutil.debugrevlogopts
    + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)

    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision

    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                      (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                      (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision

    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)

    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    It is possible to select the information to be computed, this can provide a
    noticeable speedup to the command in some cases.

    Always computed:

    - ``rev``
    - ``p1``
    - ``p2``
    - ``chainid``
    - ``chainlen``
    - ``prevrev``
    - ``deltatype``

    Computed with --no-size-info

    - ``compsize``
    - ``uncompsize``
    - ``chainsize``
    - ``chainratio``

    Computed with --no-dist-info

    - ``lindist``
    - ``extradist``
    - ``extraratio``

    Skipped with --no-sparse-info

    - ``readsize``
    - ``largestblock``
    - ``readdensity``
    - ``srchunks``

    --

    The sparse read can be enabled with experimental.sparse-read = True
    """
    revs = None
    revs_opt = opts.pop('rev', [])
    if revs_opt:
        revs = [int(r) for r in revs_opt]

    # Each info flag defaults to the value of --all-info when left unset.
    all_info = opts.pop('all_info', False)
    size_info = opts.pop('size_info', None)
    if size_info is None:
        size_info = all_info
    dist_info = opts.pop('dist_info', None)
    if dist_info is None:
        dist_info = all_info
    sparse_info = opts.pop('sparse_info', None)
    if sparse_info is None:
        sparse_info = all_info

    # Named 'rlog' so we do not shadow the module-level `revlog` import.
    rlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))

    lines = revlog_debug.debug_delta_chain(
        rlog,
        revs=revs,
        size_info=size_info,
        dist_info=dist_info,
        sparse_info=sparse_info,
    )
    # first entry is the header
    header = next(lines)
    fm.plain(header)
    # Each subsequent entry is a list of (label, format, key, value)
    # tuples, one per column.
    for entry in lines:
        label = b' '.join(e[0] for e in entry)
        # 'fmt' rather than 'format' to avoid shadowing the builtin.
        fmt = b' '.join(e[1] for e in entry)
        values = [e[3] for e in entry]
        data = {e[2]: e[3] for e in entry}
        fm.startitem()
        fm.write(label, fmt, *values, **data)
        fm.plain(b'\n')
    fm.end()
913
913
914
914
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # One positional argument means "REV only"; two mean "FILE REV".
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    rlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = rlog.parentrevs(rev)

    # Translate --source into the revision the delta search starts from.
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rlog.deltaparent(rev)
    elif source in (b'p1', b'p2'):
        base_rev = p1r if source == b'p1' else p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rlog, rev, base_rev=base_rev)
973
973
974
974
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 metadata file instead of the entries.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # Unpack the fixed-layout tree metadata blob; field order is
        # defined by dirstateutils.v2.TREE_METADATA.
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) forces mtimes off regardless of --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        # Sort entries by (mtime, filename) instead of filename only.

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an unset mtime; pad to keep columns aligned.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # The 0o20000 bit is set by S_IFLNK (0o120000) but not by
        # S_IFREG (0o100000), so it distinguishes symlinks.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            # Permission bits only, with the process umask applied.
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1062
1062
1063
1063
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if not repo.dirstate._use_dirstate_v2:
        # dirstate v1 stores no ignore-pattern hash.
        return
    docket = repo.dirstate._map.docket
    # The hash is the trailing 20 bytes (160-bit SHA-1) of the tree
    # metadata blob.
    digest = docket.tree_metadata[-20:]
    ui.write(b'%s\n' % binascii.hexlify(digest))
1078
1078
1079
1079
1080 @command(
1080 @command(
1081 b'debugdiscovery',
1081 b'debugdiscovery',
1082 [
1082 [
1083 (b'', b'old', None, _(b'use old-style discovery')),
1083 (b'', b'old', None, _(b'use old-style discovery')),
1084 (
1084 (
1085 b'',
1085 b'',
1086 b'nonheads',
1086 b'nonheads',
1087 None,
1087 None,
1088 _(b'use old-style discovery with non-heads included'),
1088 _(b'use old-style discovery with non-heads included'),
1089 ),
1089 ),
1090 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1090 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1091 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1091 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1092 (
1092 (
1093 b'',
1093 b'',
1094 b'local-as-revs',
1094 b'local-as-revs',
1095 b"",
1095 b"",
1096 b'treat local has having these revisions only',
1096 b'treat local has having these revisions only',
1097 ),
1097 ),
1098 (
1098 (
1099 b'',
1099 b'',
1100 b'remote-as-revs',
1100 b'remote-as-revs',
1101 b"",
1101 b"",
1102 b'use local as remote, with only these revisions',
1102 b'use local as remote, with only these revisions',
1103 ),
1103 ),
1104 ]
1104 ]
1105 + cmdutil.remoteopts
1105 + cmdutil.remoteopts
1106 + cmdutil.formatteropts,
1106 + cmdutil.formatteropts,
1107 _(b'[--rev REV] [OTHER]'),
1107 _(b'[--rev REV] [OTHER]'),
1108 )
1108 )
1109 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1109 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1110 """runs the changeset discovery protocol in isolation
1110 """runs the changeset discovery protocol in isolation
1111
1111
1112 The local peer can be "replaced" by a subset of the local repository by
1112 The local peer can be "replaced" by a subset of the local repository by
1113 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1113 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1114 can be "replaced" by a subset of the local repository using the
1114 can be "replaced" by a subset of the local repository using the
1115 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1115 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1116 discovery situations.
1116 discovery situations.
1117
1117
1118 The following developer oriented config are relevant for people playing with this command:
1118 The following developer oriented config are relevant for people playing with this command:
1119
1119
1120 * devel.discovery.exchange-heads=True
1120 * devel.discovery.exchange-heads=True
1121
1121
1122 If False, the discovery will not start with
1122 If False, the discovery will not start with
1123 remote head fetching and local head querying.
1123 remote head fetching and local head querying.
1124
1124
1125 * devel.discovery.grow-sample=True
1125 * devel.discovery.grow-sample=True
1126
1126
1127 If False, the sample size used in set discovery will not be increased
1127 If False, the sample size used in set discovery will not be increased
1128 through the process
1128 through the process
1129
1129
1130 * devel.discovery.grow-sample.dynamic=True
1130 * devel.discovery.grow-sample.dynamic=True
1131
1131
1132 When discovery.grow-sample.dynamic is True, the default, the sample size is
1132 When discovery.grow-sample.dynamic is True, the default, the sample size is
1133 adapted to the shape of the undecided set (it is set to the max of:
1133 adapted to the shape of the undecided set (it is set to the max of:
1134 <target-size>, len(roots(undecided)), len(heads(undecided)
1134 <target-size>, len(roots(undecided)), len(heads(undecided)
1135
1135
1136 * devel.discovery.grow-sample.rate=1.05
1136 * devel.discovery.grow-sample.rate=1.05
1137
1137
1138 the rate at which the sample grow
1138 the rate at which the sample grow
1139
1139
1140 * devel.discovery.randomize=True
1140 * devel.discovery.randomize=True
1141
1141
1142 If andom sampling during discovery are deterministic. It is meant for
1142 If andom sampling during discovery are deterministic. It is meant for
1143 integration tests.
1143 integration tests.
1144
1144
1145 * devel.discovery.sample-size=200
1145 * devel.discovery.sample-size=200
1146
1146
1147 Control the initial size of the discovery sample
1147 Control the initial size of the discovery sample
1148
1148
1149 * devel.discovery.sample-size.initial=100
1149 * devel.discovery.sample-size.initial=100
1150
1150
1151 Control the initial size of the discovery for initial change
1151 Control the initial size of the discovery for initial change
1152 """
1152 """
1153 unfi = repo.unfiltered()
1153 unfi = repo.unfiltered()
1154
1154
1155 # setup potential extra filtering
1155 # setup potential extra filtering
1156 local_revs = opts["local_as_revs"]
1156 local_revs = opts["local_as_revs"]
1157 remote_revs = opts["remote_as_revs"]
1157 remote_revs = opts["remote_as_revs"]
1158
1158
1159 # make sure tests are repeatable
1159 # make sure tests are repeatable
1160 random.seed(int(opts['seed']))
1160 random.seed(int(opts['seed']))
1161
1161
1162 if not remote_revs:
1162 if not remote_revs:
1163 path = urlutil.get_unique_pull_path_obj(
1163 path = urlutil.get_unique_pull_path_obj(
1164 b'debugdiscovery', ui, remoteurl
1164 b'debugdiscovery', ui, remoteurl
1165 )
1165 )
1166 branches = (path.branch, [])
1166 branches = (path.branch, [])
1167 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1167 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1168 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1168 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1169 else:
1169 else:
1170 branches = (None, [])
1170 branches = (None, [])
1171 remote_filtered_revs = logcmdutil.revrange(
1171 remote_filtered_revs = logcmdutil.revrange(
1172 unfi, [b"not (::(%s))" % remote_revs]
1172 unfi, [b"not (::(%s))" % remote_revs]
1173 )
1173 )
1174 remote_filtered_revs = frozenset(remote_filtered_revs)
1174 remote_filtered_revs = frozenset(remote_filtered_revs)
1175
1175
1176 def remote_func(x):
1176 def remote_func(x):
1177 return remote_filtered_revs
1177 return remote_filtered_revs
1178
1178
1179 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1179 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1180
1180
1181 remote = repo.peer()
1181 remote = repo.peer()
1182 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1182 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1183
1183
1184 if local_revs:
1184 if local_revs:
1185 local_filtered_revs = logcmdutil.revrange(
1185 local_filtered_revs = logcmdutil.revrange(
1186 unfi, [b"not (::(%s))" % local_revs]
1186 unfi, [b"not (::(%s))" % local_revs]
1187 )
1187 )
1188 local_filtered_revs = frozenset(local_filtered_revs)
1188 local_filtered_revs = frozenset(local_filtered_revs)
1189
1189
1190 def local_func(x):
1190 def local_func(x):
1191 return local_filtered_revs
1191 return local_filtered_revs
1192
1192
1193 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1193 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1194 repo = repo.filtered(b'debug-discovery-local-filter')
1194 repo = repo.filtered(b'debug-discovery-local-filter')
1195
1195
1196 data = {}
1196 data = {}
1197 if opts.get('old'):
1197 if opts.get('old'):
1198
1198
1199 def doit(pushedrevs, remoteheads, remote=remote):
1199 def doit(pushedrevs, remoteheads, remote=remote):
1200 if not hasattr(remote, 'branches'):
1200 if not hasattr(remote, 'branches'):
1201 # enable in-client legacy support
1201 # enable in-client legacy support
1202 remote = localrepo.locallegacypeer(remote.local())
1202 remote = localrepo.locallegacypeer(remote.local())
1203 if remote_revs:
1203 if remote_revs:
1204 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1204 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1205 remote._repo = r
1205 remote._repo = r
1206 common, _in, hds = treediscovery.findcommonincoming(
1206 common, _in, hds = treediscovery.findcommonincoming(
1207 repo, remote, force=True, audit=data
1207 repo, remote, force=True, audit=data
1208 )
1208 )
1209 common = set(common)
1209 common = set(common)
1210 if not opts.get('nonheads'):
1210 if not opts.get('nonheads'):
1211 ui.writenoi18n(
1211 ui.writenoi18n(
1212 b"unpruned common: %s\n"
1212 b"unpruned common: %s\n"
1213 % b" ".join(sorted(short(n) for n in common))
1213 % b" ".join(sorted(short(n) for n in common))
1214 )
1214 )
1215
1215
1216 clnode = repo.changelog.node
1216 clnode = repo.changelog.node
1217 common = repo.revs(b'heads(::%ln)', common)
1217 common = repo.revs(b'heads(::%ln)', common)
1218 common = {clnode(r) for r in common}
1218 common = {clnode(r) for r in common}
1219 return common, hds
1219 return common, hds
1220
1220
1221 else:
1221 else:
1222
1222
1223 def doit(pushedrevs, remoteheads, remote=remote):
1223 def doit(pushedrevs, remoteheads, remote=remote):
1224 nodes = None
1224 nodes = None
1225 if pushedrevs:
1225 if pushedrevs:
1226 revs = logcmdutil.revrange(repo, pushedrevs)
1226 revs = logcmdutil.revrange(repo, pushedrevs)
1227 nodes = [repo[r].node() for r in revs]
1227 nodes = [repo[r].node() for r in revs]
1228 common, any, hds = setdiscovery.findcommonheads(
1228 common, any, hds = setdiscovery.findcommonheads(
1229 ui,
1229 ui,
1230 repo,
1230 repo,
1231 remote,
1231 remote,
1232 ancestorsof=nodes,
1232 ancestorsof=nodes,
1233 audit=data,
1233 audit=data,
1234 abortwhenunrelated=False,
1234 abortwhenunrelated=False,
1235 )
1235 )
1236 return common, hds
1236 return common, hds
1237
1237
1238 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1238 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1239 localrevs = opts['rev']
1239 localrevs = opts['rev']
1240
1240
1241 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1241 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1242 if fm.strict_format:
1242 if fm.strict_format:
1243
1243
1244 @contextlib.contextmanager
1244 @contextlib.contextmanager
1245 def may_capture_output():
1245 def may_capture_output():
1246 ui.pushbuffer()
1246 ui.pushbuffer()
1247 yield
1247 yield
1248 data[b'output'] = ui.popbuffer()
1248 data[b'output'] = ui.popbuffer()
1249
1249
1250 else:
1250 else:
1251 may_capture_output = util.nullcontextmanager
1251 may_capture_output = util.nullcontextmanager
1252 with may_capture_output():
1252 with may_capture_output():
1253 with util.timedcm('debug-discovery') as t:
1253 with util.timedcm('debug-discovery') as t:
1254 common, hds = doit(localrevs, remoterevs)
1254 common, hds = doit(localrevs, remoterevs)
1255
1255
1256 # compute all statistics
1256 # compute all statistics
1257 if len(common) == 1 and repo.nullid in common:
1257 if len(common) == 1 and repo.nullid in common:
1258 common = set()
1258 common = set()
1259 heads_common = set(common)
1259 heads_common = set(common)
1260 heads_remote = set(hds)
1260 heads_remote = set(hds)
1261 heads_local = set(repo.heads())
1261 heads_local = set(repo.heads())
1262 # note: they cannot be a local or remote head that is in common and not
1262 # note: they cannot be a local or remote head that is in common and not
1263 # itself a head of common.
1263 # itself a head of common.
1264 heads_common_local = heads_common & heads_local
1264 heads_common_local = heads_common & heads_local
1265 heads_common_remote = heads_common & heads_remote
1265 heads_common_remote = heads_common & heads_remote
1266 heads_common_both = heads_common & heads_remote & heads_local
1266 heads_common_both = heads_common & heads_remote & heads_local
1267
1267
1268 all = repo.revs(b'all()')
1268 all = repo.revs(b'all()')
1269 common = repo.revs(b'::%ln', common)
1269 common = repo.revs(b'::%ln', common)
1270 roots_common = repo.revs(b'roots(::%ld)', common)
1270 roots_common = repo.revs(b'roots(::%ld)', common)
1271 missing = repo.revs(b'not ::%ld', common)
1271 missing = repo.revs(b'not ::%ld', common)
1272 heads_missing = repo.revs(b'heads(%ld)', missing)
1272 heads_missing = repo.revs(b'heads(%ld)', missing)
1273 roots_missing = repo.revs(b'roots(%ld)', missing)
1273 roots_missing = repo.revs(b'roots(%ld)', missing)
1274 assert len(common) + len(missing) == len(all)
1274 assert len(common) + len(missing) == len(all)
1275
1275
1276 initial_undecided = repo.revs(
1276 initial_undecided = repo.revs(
1277 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1277 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1278 )
1278 )
1279 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1279 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1280 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1280 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1281 common_initial_undecided = initial_undecided & common
1281 common_initial_undecided = initial_undecided & common
1282 missing_initial_undecided = initial_undecided & missing
1282 missing_initial_undecided = initial_undecided & missing
1283
1283
1284 data[b'elapsed'] = t.elapsed
1284 data[b'elapsed'] = t.elapsed
1285 data[b'nb-common-heads'] = len(heads_common)
1285 data[b'nb-common-heads'] = len(heads_common)
1286 data[b'nb-common-heads-local'] = len(heads_common_local)
1286 data[b'nb-common-heads-local'] = len(heads_common_local)
1287 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1287 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1288 data[b'nb-common-heads-both'] = len(heads_common_both)
1288 data[b'nb-common-heads-both'] = len(heads_common_both)
1289 data[b'nb-common-roots'] = len(roots_common)
1289 data[b'nb-common-roots'] = len(roots_common)
1290 data[b'nb-head-local'] = len(heads_local)
1290 data[b'nb-head-local'] = len(heads_local)
1291 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1291 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1292 data[b'nb-head-remote'] = len(heads_remote)
1292 data[b'nb-head-remote'] = len(heads_remote)
1293 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1293 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1294 heads_common_remote
1294 heads_common_remote
1295 )
1295 )
1296 data[b'nb-revs'] = len(all)
1296 data[b'nb-revs'] = len(all)
1297 data[b'nb-revs-common'] = len(common)
1297 data[b'nb-revs-common'] = len(common)
1298 data[b'nb-revs-missing'] = len(missing)
1298 data[b'nb-revs-missing'] = len(missing)
1299 data[b'nb-missing-heads'] = len(heads_missing)
1299 data[b'nb-missing-heads'] = len(heads_missing)
1300 data[b'nb-missing-roots'] = len(roots_missing)
1300 data[b'nb-missing-roots'] = len(roots_missing)
1301 data[b'nb-ini_und'] = len(initial_undecided)
1301 data[b'nb-ini_und'] = len(initial_undecided)
1302 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1302 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1303 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1303 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1304 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1304 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1305 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1305 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1306
1306
1307 fm.startitem()
1307 fm.startitem()
1308 fm.data(**pycompat.strkwargs(data))
1308 fm.data(**pycompat.strkwargs(data))
1309 # display discovery summary
1309 # display discovery summary
1310 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1310 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1311 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1311 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1312 if b'total-round-trips-heads' in data:
1312 if b'total-round-trips-heads' in data:
1313 fm.plain(
1313 fm.plain(
1314 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1314 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1315 )
1315 )
1316 if b'total-round-trips-branches' in data:
1316 if b'total-round-trips-branches' in data:
1317 fm.plain(
1317 fm.plain(
1318 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1318 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1319 % data
1319 % data
1320 )
1320 )
1321 if b'total-round-trips-between' in data:
1321 if b'total-round-trips-between' in data:
1322 fm.plain(
1322 fm.plain(
1323 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1323 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1324 )
1324 )
1325 fm.plain(b"queries: %(total-queries)9d\n" % data)
1325 fm.plain(b"queries: %(total-queries)9d\n" % data)
1326 if b'total-queries-branches' in data:
1326 if b'total-queries-branches' in data:
1327 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1327 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1328 if b'total-queries-between' in data:
1328 if b'total-queries-between' in data:
1329 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1329 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1330 fm.plain(b"heads summary:\n")
1330 fm.plain(b"heads summary:\n")
1331 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1331 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1332 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1332 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1333 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1333 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1334 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1334 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1335 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1335 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1336 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1336 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1337 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1337 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1338 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1338 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1339 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1339 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1340 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1340 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1341 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1341 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1342 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1342 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1343 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1343 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1344 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1344 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1345 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1345 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1346 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1346 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1347 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1347 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1348 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1348 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1349 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1349 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1350 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1350 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1351 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1351 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1352 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1352 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1353
1353
1354 if ui.verbose:
1354 if ui.verbose:
1355 fm.plain(
1355 fm.plain(
1356 b"common heads: %s\n"
1356 b"common heads: %s\n"
1357 % b" ".join(sorted(short(n) for n in heads_common))
1357 % b" ".join(sorted(short(n) for n in heads_common))
1358 )
1358 )
1359 fm.end()
1359 fm.end()
1360
1360
1361
1361
1362 _chunksize = 4 << 10
1362 _chunksize = 4 << 10
1363
1363
1364
1364
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched through Mercurial's own URL handling (so
    proxy/auth configuration applies) and streamed in fixed-size chunks
    either to the ui or, when ``--output`` is given, to that file.
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in bounded chunks so large downloads don't buffer fully
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # bug fix: the response handle was previously never closed, leaking
        # the underlying connection/file even when opening `output` failed
        fh.close()
1387
1387
1388
1388
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))

    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        is_internal = extensions.ismoduleinternal(extmod)

        # Figure out where the extension was loaded from, if determinable.
        src = None
        if hasattr(extmod, '__file__'):
            src = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            src = pycompat.sysexecutable

        if is_internal:
            tested = []  # never expose magic string to users
        else:
            tested = getattr(extmod, 'testedwith', b'').split()
        bug_url = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # Annotate the bare name with a compatibility hint.
            if is_internal or hgver in tested:
                fm.plain(b'\n')
            elif not tested:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % tested[-1])

        fm.condwrite(
            ui.verbose and src,
            b'source',
            _(b' location: %s\n'),
            src or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][is_internal])
            fm.data(bundled=is_internal)

        fm.condwrite(
            ui.verbose and tested,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and bug_url,
            b'buglink',
            _(b' bug reporting: %s\n'),
            bug_url or b"",
        )

    fm.end()
1449
1449
1450
1450
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    # Touching fileset.symbols forces the module import so its predicates
    # are registered before the optimizer runs.
    fileset.symbols

    ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)

    # Analysis pipeline applied, in order, to the parsed expression tree.
    pipeline = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    known_stages = {name for name, _func in pipeline}

    to_show = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        to_show.add(b'parsed')
    if opts['show_stage'] == [b'all']:
        to_show.update(known_stages)
    else:
        for name in opts['show_stage']:
            if name not in known_stages:
                raise error.Abort(_(b'invalid stage name: %s') % name)
        to_show.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for name, transform in pipeline:
        tree = transform(tree)
        if name in to_show:
            if opts['show_stage'] or name != b'parsed':
                ui.write(b"* %s:\n" % name)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Gather the candidate file names the matcher will be tested against.
    candidates = set()
    if opts['all_files']:
        for rev in repo:
            changectx = repo[rev]
            candidates.update(changectx.files())
            candidates.update(changectx.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        candidates.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        candidates.update(wctx.substate)
    else:
        candidates.update(ctx.files())
        candidates.update(ctx.substate)

    matcher = ctx.matchfileset(repo.getcwd(), expr)
    show_matcher = opts['show_matcher']
    if show_matcher or (show_matcher is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    for fname in sorted(candidates):
        if matcher(fname):
            ui.write(b"%s\n" % fname)
1546
1546
1547
1547
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Writing a report and consuming one (or dry-running) are exclusive modes.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        # issue6528 corruption only ever affected revlogv1 storage
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1620
1620
1621
1621
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # Column width: the longest variant name, but never narrower than the
    # b'format-variant' header itself.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Build a b'%s:' template padded so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
    if fm.isplain():

        def formatvalue(value):
            # Plain output renders booleans as yes/no; byte-string values
            # (anything with a startswith attribute) pass through unchanged.
            if hasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # Structured formatters (json, templates, ...) get the raw value.
        formatvalue = pycompat.identity

    # Header row; the config/default columns only appear with --verbose.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so the output can be colorized according to whether
        # the repo value matches the config and/or the Mercurial default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1691
1691
1692
1692
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # Render a boolean probe result as this command always has.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # Probe case sensitivity with a throwaway file created in `path`;
        # if the filesystem refuses, report b'(unknown)'.
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1715
1715
1716
1716
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    # Talk to the source repository through the peer interface; aborts if
    # the peer does not advertise the getbundle capability.
    repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing --type value to an internal bundle type name.
    bundletype = opts.get('type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1762
1762
1763
1763
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # The file itself matches an ignore rule.
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # Otherwise check whether one of its parent directories
                    # is ignored, which ignores the file by containment.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # Report which ignore file and line produced the match.
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1812
1812
1813
1813
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    byte_opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, byte_opts)
    formatter = ui.formatter(b'debugindex', byte_opts)
    # Stores that wrap a revlog expose it as `_revlog`; anything else is
    # used directly.
    target = getattr(store, '_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=formatter,
        revlog=target,
        full_node=ui.debugflag,
    )
1835
1835
1836
1836
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    revlog = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for rev in revlog:
        parents = revlog.parents(revlog.node(rev))
        # Always emit the first-parent edge; emit the second one only when
        # it is not the null node.
        ui.write(b"\t%d -> %d\n" % (revlog.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (revlog.rev(parents[1]), rev))
    ui.write(b"}\n")
1856
1856
1857
1857
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index first (result deliberately unused) — presumably to
    # make sure it is loaded before stats are read; TODO confirm intent.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not hasattr(index, 'stats'):
        # Only index implementations providing stats() are supported.
        raise error.Abort(_(b'debugindexstats only works with native C code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1867
1867
1868
1868
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    # Count of detected problems; doubles as the command's exit status.
    problems = 0

    fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if hasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # Frozen (PyOxidizer) builds have no os.__file__; report the
        # executable instead.
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if hasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    # NOTE: rebinds the `rustext` module reference from above to a boolean.
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Import the compiled extension modules to verify they load; any
        # failure is reported as an install problem.
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util.has_re2():
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is cleared so the "installed incorrectly" problem below
                # is reported.
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # Distinguish "no editor configured at all" (default b'vi' missing) from
    # "a configured editor that cannot be found".
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Let extensions contribute their own install checks.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2165
2165
2166
2166
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # One digit per queried node, in input order.
    flags = peer.known([bin(s) for s in ids])
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
2179
2179
2180
2180
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias: forwards directly to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2185
2185
2186
2186
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Unconditionally delete the lock file(s) without checking ownership;
    # this is why the corresponding options are labelled DANGEROUS.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: fail immediately rather than block if held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                # wait=False: fail immediately rather than block if held
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user or a signal tells
            # us to let go; the finally clause below does the release.
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock, so it was free: give it back.
            l.release()
        else:
            # Lock is genuinely held by someone; describe the holder from
            # the lock file's mtime, owner uid, and "host:pid" contents.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the failed acquisition and
                # our lstat: treat it as free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2309
2309
2310
2310
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache hanging off the manifest revlog; not
        # every revlog implementation carries one, hence the AttributeError
        # guard turning the failure into a user-facing abort.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Cache mutation happens under the working-copy lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # Neither --clear nor --add: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2384
2384
2385
2385
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    # Default template mirrors the nested formatter structure built below
    # (commits, files with per-file extras, then file-less extras).
    if not opts['template']:
        opts['template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
    fm.startitem()

    # The two commits being merged: local (label index 0) and other (1).
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the record layout depends on the state kind.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras recorded for files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2492
2492
2493
2493
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # 'branches' historically listed only open branches, so it is excluded
    # from the generic namespace listing and handled separately below.
    for ns_name, ns in repo.names.items():
        if ns_name != b'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(
        branch
        for (branch, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )

    # With no arguments, complete against the empty prefix (everything).
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2516
2516
2517
2517
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    # Select the target revlog: either via -c/-m/--dir or an explicit FILE,
    # defaulting to the changelog when nothing is specified.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    # Unwrap container objects down to the underlying revlog.
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        # Prefer the C/Rust index's own serializer when available.
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) describing the on-disk data.
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2587
2587
2588
2588
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Parse a full hex node id without requiring it to exist locally.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get('delete'):
        # Deletion mode: remove the markers at the given integer indices.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker obsoleting `precursor` with the
        # given successors, flags, user, date and (optionally) parents.
        if opts['rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # grammar fix: was "cannot used --record-parents"
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts['flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = logcmdutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts['exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2736
2736
2737
2737
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when -r is absent).
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    copy_map = ctx.p1copies()
    for destination, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2749
2749
2750
2750
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # default=None means "working directory" when no --rev is given
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2762
2762
2763
2763
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) whose dirstate entries match `path` and
        # whose state character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate OS separators if needed
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # no state filter requested -> accept all states
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2832
2832
2833
2833
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # match against ctx1 since copies are traced from the old side
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2847
2847
2848
2848
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # always release the peer connection, even on error
        peer.close()
2872
2872
2873
2873
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    overrides = {}
    if opts['tool']:
        overrides[(b'ui', b'forcemerge')] = opts['tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress tool-selection chatter unless --debug is in effect
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2957
2957
2958
2958
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # pushkey returns truthy on success; shell exit code is inverted
            return not r
        else:
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2994
2994
2995
2995
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) encodings of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # relation: '=' equal, '>'/'<' ancestor ordering, '|' divergent
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3022
3022
3023
3023
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3075
3075
3076
3076
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo, opts.get("only_data"))
3092
3092
3093
3093
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() yields (source path, source filenode) or False
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3112
3112
3113
3113
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3119
3119
3120
3120
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
    )

    if opts.get("dump"):
        revlog_debug.dump(ui, r)
    else:
        revlog_debug.debug_revlog(ui, r)
    return 0
3138
3138
3139
3139
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts)
    )
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3254
3254
3255
3255
3256 @command(
3256 @command(
3257 b'debugrevspec',
3257 b'debugrevspec',
3258 [
3258 [
3259 (
3259 (
3260 b'',
3260 b'',
3261 b'optimize',
3261 b'optimize',
3262 None,
3262 None,
3263 _(b'print parsed tree after optimizing (DEPRECATED)'),
3263 _(b'print parsed tree after optimizing (DEPRECATED)'),
3264 ),
3264 ),
3265 (
3265 (
3266 b'',
3266 b'',
3267 b'show-revs',
3267 b'show-revs',
3268 True,
3268 True,
3269 _(b'print list of result revisions (default)'),
3269 _(b'print list of result revisions (default)'),
3270 ),
3270 ),
3271 (
3271 (
3272 b's',
3272 b's',
3273 b'show-set',
3273 b'show-set',
3274 None,
3274 None,
3275 _(b'print internal representation of result set'),
3275 _(b'print internal representation of result set'),
3276 ),
3276 ),
3277 (
3277 (
3278 b'p',
3278 b'p',
3279 b'show-stage',
3279 b'show-stage',
3280 [],
3280 [],
3281 _(b'print parsed tree at the given stage'),
3281 _(b'print parsed tree at the given stage'),
3282 _(b'NAME'),
3282 _(b'NAME'),
3283 ),
3283 ),
3284 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3284 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3285 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3285 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3286 ],
3286 ],
3287 b'REVSPEC',
3287 b'REVSPEC',
3288 )
3288 )
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    aliases = ui.configitems(b'revsetalias')
    # The transformation pipeline a revset expression goes through; each
    # stage maps the previous stage's tree to a new tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed, and stages printed only when
    # their tree differs from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts['optimize']:
        showalways.add(b'optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, remembering each intermediate tree and printing
    # the requested stages along the way.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and compare
        # the resulting revision sequences element by element.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Emit a unified-diff-style listing of where the two evaluations
        # disagree.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3386
3386
3387
3387
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    # Only the SSH-over-stdio transport is implemented so far.
    if not opts['sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional unbuffered file object that receives a log of server I/O.
    logfh = None

    # --logiofd and --logiofile are two ways to name the same log sink;
    # accept at most one of them.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], b'ab', 0)

    # Serve the repository over this process's stdin/stdout.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3434
3434
3435
3435
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly results in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file statuses may be incorrect
    after running this command. Use it only if you are one of the few people who
    deeply understands both conversion tools and file level histories. If you are
    reading this help, you are not one of those people (most of them sailed west
    from Mithlond anyway).

    So, one more time, DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """
    # Resolve both revision arguments to binary node ids; an omitted second
    # revision falls back to the null revision.
    parents = [
        scmutil.revsingle(repo, rev1).node(),
        scmutil.revsingle(repo, rev2, b'null').node(),
    ]

    # Rewrite the dirstate parent pointers under the working-copy lock.
    with repo.wlock():
        repo.setparents(*parents)
3463
3463
3464
3464
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    # With -c/-m/--dir the first positional argument is actually the
    # revision, so shuffle the arguments accordingly.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    # Reach through wrapper storage objects to the underlying revlog when
    # one is present; otherwise keep the storage object as-is.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key so the output is stable.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3494
3494
3495
3495
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12,
    # so build an explicit client context instead. Verification is disabled
    # on purpose: we need the peer's certificate even when its chain is
    # currently incomplete, so win32.checkcertificatechain() can repair it
    # below.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First check without building; only go to Windows Update if the
        # chain is incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3565
3565
3566
3566
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    # Resolve the requested revision, then render each ancestor through
    # the template in stable-tail order.
    head_rev = logcmdutil.revsingle(repo, revspec).rev()
    displayer = logcmdutil.maketemplater(ui, repo, template)
    for anc_rev in stabletailsort._stable_tail_sort_naive(
        repo.changelog, head_rev
    ):
        displayer.show(repo[anc_rev])
3589
3589
3590
3590
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    anchor_rev = logcmdutil.revsingle(repo, rspec).rev()

    # Pick the leap enumerator: only the "specific" leaps, or all of them.
    leap_finder = (
        stabletailsort._find_specific_leaps_naive
        if specific
        else stabletailsort._find_all_leaps_naive
    )

    # Print each leap as a templated source/target pair followed by a
    # newline separator.
    displayer = logcmdutil.maketemplater(ui, repo, template)
    for leap_src, leap_dst in leap_finder(repo.changelog, anchor_rev):
        displayer.show(repo[leap_src])
        displayer.show(repo[leap_dst])
        ui.write(b'\n')
3619
3619
3620
3620
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip-backup bundle files, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # These log options are consumed by the machinery below but are not
    # exposed as flags of this command, so provide neutral values.
    opts["bundle"] = b""
    opts["force"] = None
    limit = logcmdutil.getlimit(pycompat.byteskwargs(opts))

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from a bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get("newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get("no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get("recover")
    if recovernode:
        # Nothing to recover if the node is already in the repository.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, pycompat.byteskwargs(opts), path)
        except error.LookupError as ex:
            # The bundle may reference a parent revision that is no longer
            # present locally; warn and try the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get('branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get("rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence status output while extracting the incoming changesets
        # from the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts["bundle"], opts["force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: apply the first bundle containing the
                # requested node, then stop scanning further backups.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: print the bundle's mtime (plus its path with
                # --verbose) followed by a summary of its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        "template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, pycompat.byteskwargs(opts), False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3759
3759
3760
3760
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state of the requested revision, one entry
    # per path, sorted for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3772
3772
3773
3773
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names exposed in the interpreter's namespace.
    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if command:
        # Non-interactive mode: compile the supplied program and run it in
        # the prepared namespace, then return without starting a REPL.
        compiled = code.compile_command(encoding.strfromlocal(command))
        code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
        return

    # Interactive mode: start a REPL with ui/repo in scope.
    code.interact(local=imported_objects)
3823
3823
3824
3824
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    selections = [opts["changelog"], opts["manifest"], opts["filelogs"]]
    # When no category is selected explicitly, report on all of them.
    if all(sel is None for sel in selections):
        selections = [True, True, True]
    changelog, manifest, filelogs = selections

    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts))
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3849
3849
3850
3850
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successors = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successors:
            if succsset:
                # Indent the set, then list its members space-separated.
                ui.write(b'    ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3905
3905
3906
3906
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Distinguish "no cache entry" (None) from "bogus entry" (falsy but
        # not None) from a usable fnode.
        if tagsnode is None:
            display = b'missing'
        elif tagsnode:
            display = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
3925
3925
3926
3926
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Resolving revisions requires a repository; the command is declared
        # optionalrepo, so repo may legitimately be None here.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE template properties.  An empty key or the reserved
    # name 'ui' is rejected the same way as an unsplittable definition.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree only when alias
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the user-supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per selected revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3990
3990
3991
3991
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may return None (e.g. non-interactive); make that visible.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4006
4006
4007
4007
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery hands back, verbatim.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4020
4020
4021
4021
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy and store locks so every cache can be
    # rebuilt safely.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4027
4027
4028
4028
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: the upgrade module does all the work.  The remaining
    # --changelog/--manifest/--filelogs selectors travel through **opts.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4078
4078
4079
4079
@command(
    b'debug::unbundle',
    [],
    _(b'FILE...'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def debugunbundle(ui, repo, fname1, *fnames):
    """same as `hg unbundle`, but pretend to come from a push

    This is useful to debug behavior and performance change in this case.
    """
    # Fix: docstring typo "pretent" -> "pretend" (shows up in `hg help`).
    fnames = (fname1,) + fnames
    cmdutil.unbundle_files(ui, repo, fnames)
4093
4093
4094
4094
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite path separators when the user asked for forward slashes
    # on a platform whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        pathfn = util.normpath
    else:
        pathfn = lambda fn: fn
    # Column widths sized to the longest repo-relative and cwd-relative
    # names so the columns line up.  The loop variable is `fname` rather
    # than `abs` to avoid shadowing the `abs` builtin, and generators are
    # used instead of throwaway lists inside max().
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            pathfn(repo.pathto(fname)),
            m.exact(fname) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4120
4120
4121
4121
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # List each divergent node with its phase, trailing-space
            # separated so it joins cleanly with the reason text.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4139
4139
4140
4140
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Connect to the peer, strip the generic remote options, and forward
    # the remaining truthy options as wire command arguments.
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    try:
        for opt in cmdutil.remoteopts:
            del opts[pycompat.sysstr(opt[1])]
        args = {k: v for k, v in opts.items() if v}

        # run twice to check that we don't mess up the stream for the next command
        res1 = peer.debugwireargs(*vals, **args)
        res2 = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4170
4170
4171
4171
4172 def _parsewirelangblocks(fh):
4172 def _parsewirelangblocks(fh):
4173 activeaction = None
4173 activeaction = None
4174 blocklines = []
4174 blocklines = []
4175 lastindent = 0
4175 lastindent = 0
4176
4176
4177 for line in fh:
4177 for line in fh:
4178 line = line.rstrip()
4178 line = line.rstrip()
4179 if not line:
4179 if not line:
4180 continue
4180 continue
4181
4181
4182 if line.startswith(b'#'):
4182 if line.startswith(b'#'):
4183 continue
4183 continue
4184
4184
4185 if not line.startswith(b' '):
4185 if not line.startswith(b' '):
4186 # New block. Flush previous one.
4186 # New block. Flush previous one.
4187 if activeaction:
4187 if activeaction:
4188 yield activeaction, blocklines
4188 yield activeaction, blocklines
4189
4189
4190 activeaction = line
4190 activeaction = line
4191 blocklines = []
4191 blocklines = []
4192 lastindent = 0
4192 lastindent = 0
4193 continue
4193 continue
4194
4194
4195 # Else we start with an indent.
4195 # Else we start with an indent.
4196
4196
4197 if not activeaction:
4197 if not activeaction:
4198 raise error.Abort(_(b'indented line outside of block'))
4198 raise error.Abort(_(b'indented line outside of block'))
4199
4199
4200 indent = len(line) - len(line.lstrip())
4200 indent = len(line) - len(line.lstrip())
4201
4201
4202 # If this line is indented more than the last line, concatenate it.
4202 # If this line is indented more than the last line, concatenate it.
4203 if indent > lastindent and blocklines:
4203 if indent > lastindent and blocklines:
4204 blocklines[-1] += line.lstrip()
4204 blocklines[-1] += line.lstrip()
4205 else:
4205 else:
4206 blocklines.append(line)
4206 blocklines.append(line)
4207 lastindent = indent
4207 lastindent = indent
4208
4208
4209 # Flush last block.
4209 # Flush last block.
4210 if activeaction:
4210 if activeaction:
4211 yield activeaction, blocklines
4211 yield activeaction, blocklines
4212
4212
4213
4213
4214 @command(
4214 @command(
4215 b'debugwireproto',
4215 b'debugwireproto',
4216 [
4216 [
4217 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4217 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4218 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4218 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4219 (
4219 (
4220 b'',
4220 b'',
4221 b'noreadstderr',
4221 b'noreadstderr',
4222 False,
4222 False,
4223 _(b'do not read from stderr of the remote'),
4223 _(b'do not read from stderr of the remote'),
4224 ),
4224 ),
4225 (
4225 (
4226 b'',
4226 b'',
4227 b'nologhandshake',
4227 b'nologhandshake',
4228 False,
4228 False,
4229 _(b'do not log I/O related to the peer handshake'),
4229 _(b'do not log I/O related to the peer handshake'),
4230 ),
4230 ),
4231 ]
4231 ]
4232 + cmdutil.remoteopts,
4232 + cmdutil.remoteopts,
4233 _(b'[PATH]'),
4233 _(b'[PATH]'),
4234 optionalrepo=True,
4234 optionalrepo=True,
4235 )
4235 )
4236 def debugwireproto(ui, repo, path=None, **opts):
4236 def debugwireproto(ui, repo, path=None, **opts):
4237 """send wire protocol commands to a server
4237 """send wire protocol commands to a server
4238
4238
4239 This command can be used to issue wire protocol commands to remote
4239 This command can be used to issue wire protocol commands to remote
4240 peers and to debug the raw data being exchanged.
4240 peers and to debug the raw data being exchanged.
4241
4241
4242 ``--localssh`` will start an SSH server against the current repository
4242 ``--localssh`` will start an SSH server against the current repository
4243 and connect to that. By default, the connection will perform a handshake
4243 and connect to that. By default, the connection will perform a handshake
4244 and establish an appropriate peer instance.
4244 and establish an appropriate peer instance.
4245
4245
4246 ``--peer`` can be used to bypass the handshake protocol and construct a
4246 ``--peer`` can be used to bypass the handshake protocol and construct a
4247 peer instance using the specified class type. Valid values are ``raw``,
4247 peer instance using the specified class type. Valid values are ``raw``,
4248 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4248 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4249 don't support higher-level command actions.
4249 don't support higher-level command actions.
4250
4250
4251 ``--noreadstderr`` can be used to disable automatic reading from stderr
4251 ``--noreadstderr`` can be used to disable automatic reading from stderr
4252 of the peer (for SSH connections only). Disabling automatic reading of
4252 of the peer (for SSH connections only). Disabling automatic reading of
4253 stderr is useful for making output more deterministic.
4253 stderr is useful for making output more deterministic.
4254
4254
4255 Commands are issued via a mini language which is specified via stdin.
4255 Commands are issued via a mini language which is specified via stdin.
4256 The language consists of individual actions to perform. An action is
4256 The language consists of individual actions to perform. An action is
4257 defined by a block. A block is defined as a line with no leading
4257 defined by a block. A block is defined as a line with no leading
4258 space followed by 0 or more lines with leading space. Blocks are
4258 space followed by 0 or more lines with leading space. Blocks are
4259 effectively a high-level command with additional metadata.
4259 effectively a high-level command with additional metadata.
4260
4260
4261 Lines beginning with ``#`` are ignored.
4261 Lines beginning with ``#`` are ignored.
4262
4262
4263 The following sections denote available actions.
4263 The following sections denote available actions.
4264
4264
4265 raw
4265 raw
4266 ---
4266 ---
4267
4267
4268 Send raw data to the server.
4268 Send raw data to the server.
4269
4269
4270 The block payload contains the raw data to send as one atomic send
4270 The block payload contains the raw data to send as one atomic send
4271 operation. The data may not actually be delivered in a single system
4271 operation. The data may not actually be delivered in a single system
4272 call: it depends on the abilities of the transport being used.
4272 call: it depends on the abilities of the transport being used.
4273
4273
4274 Each line in the block is de-indented and concatenated. Then, that
4274 Each line in the block is de-indented and concatenated. Then, that
4275 value is evaluated as a Python b'' literal. This allows the use of
4275 value is evaluated as a Python b'' literal. This allows the use of
4276 backslash escaping, etc.
4276 backslash escaping, etc.
4277
4277
4278 raw+
4278 raw+
4279 ----
4279 ----
4280
4280
4281 Behaves like ``raw`` except flushes output afterwards.
4281 Behaves like ``raw`` except flushes output afterwards.
4282
4282
4283 command <X>
4283 command <X>
4284 -----------
4284 -----------
4285
4285
4286 Send a request to run a named command, whose name follows the ``command``
4286 Send a request to run a named command, whose name follows the ``command``
4287 string.
4287 string.
4288
4288
4289 Arguments to the command are defined as lines in this block. The format of
4289 Arguments to the command are defined as lines in this block. The format of
4290 each line is ``<key> <value>``. e.g.::
4290 each line is ``<key> <value>``. e.g.::
4291
4291
4292 command listkeys
4292 command listkeys
4293 namespace bookmarks
4293 namespace bookmarks
4294
4294
4295 If the value begins with ``eval:``, it will be interpreted as a Python
4295 If the value begins with ``eval:``, it will be interpreted as a Python
4296 literal expression. Otherwise values are interpreted as Python b'' literals.
4296 literal expression. Otherwise values are interpreted as Python b'' literals.
4297 This allows sending complex types and encoding special byte sequences via
4297 This allows sending complex types and encoding special byte sequences via
4298 backslash escaping.
4298 backslash escaping.
4299
4299
4300 The following arguments have special meaning:
4300 The following arguments have special meaning:
4301
4301
4302 ``PUSHFILE``
4302 ``PUSHFILE``
4303 When defined, the *push* mechanism of the peer will be used instead
4303 When defined, the *push* mechanism of the peer will be used instead
4304 of the static request-response mechanism and the content of the
4304 of the static request-response mechanism and the content of the
4305 file specified in the value of this argument will be sent as the
4305 file specified in the value of this argument will be sent as the
4306 command payload.
4306 command payload.
4307
4307
4308 This can be used to submit a local bundle file to the remote.
4308 This can be used to submit a local bundle file to the remote.
4309
4309
4310 batchbegin
4310 batchbegin
4311 ----------
4311 ----------
4312
4312
4313 Instruct the peer to begin a batched send.
4313 Instruct the peer to begin a batched send.
4314
4314
4315 All ``command`` blocks are queued for execution until the next
4315 All ``command`` blocks are queued for execution until the next
4316 ``batchsubmit`` block.
4316 ``batchsubmit`` block.
4317
4317
4318 batchsubmit
4318 batchsubmit
4319 -----------
4319 -----------
4320
4320
4321 Submit previously queued ``command`` blocks as a batch request.
4321 Submit previously queued ``command`` blocks as a batch request.
4322
4322
4323 This action MUST be paired with a ``batchbegin`` action.
4323 This action MUST be paired with a ``batchbegin`` action.
4324
4324
4325 httprequest <method> <path>
4325 httprequest <method> <path>
4326 ---------------------------
4326 ---------------------------
4327
4327
4328 (HTTP peer only)
4328 (HTTP peer only)
4329
4329
4330 Send an HTTP request to the peer.
4330 Send an HTTP request to the peer.
4331
4331
4332 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4332 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4333
4333
4334 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4334 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4335 headers to add to the request. e.g. ``Accept: foo``.
4335 headers to add to the request. e.g. ``Accept: foo``.
4336
4336
4337 The following arguments are special:
4337 The following arguments are special:
4338
4338
4339 ``BODYFILE``
4339 ``BODYFILE``
4340 The content of the file defined as the value to this argument will be
4340 The content of the file defined as the value to this argument will be
4341 transferred verbatim as the HTTP request body.
4341 transferred verbatim as the HTTP request body.
4342
4342
4343 ``frame <type> <flags> <payload>``
4343 ``frame <type> <flags> <payload>``
4344 Send a unified protocol frame as part of the request body.
4344 Send a unified protocol frame as part of the request body.
4345
4345
4346 All frames will be collected and sent as the body to the HTTP
4346 All frames will be collected and sent as the body to the HTTP
4347 request.
4347 request.
4348
4348
4349 close
4349 close
4350 -----
4350 -----
4351
4351
4352 Close the connection to the server.
4352 Close the connection to the server.
4353
4353
4354 flush
4354 flush
4355 -----
4355 -----
4356
4356
4357 Flush data written to the server.
4357 Flush data written to the server.
4358
4358
4359 readavailable
4359 readavailable
4360 -------------
4360 -------------
4361
4361
4362 Close the write end of the connection and read all available data from
4362 Close the write end of the connection and read all available data from
4363 the server.
4363 the server.
4364
4364
4365 If the connection to the server encompasses multiple pipes, we poll both
4365 If the connection to the server encompasses multiple pipes, we poll both
4366 pipes and read available data.
4366 pipes and read available data.
4367
4367
4368 readline
4368 readline
4369 --------
4369 --------
4370
4370
4371 Read a line of output from the server. If there are multiple output
4371 Read a line of output from the server. If there are multiple output
4372 pipes, reads only the main pipe.
4372 pipes, reads only the main pipe.
4373
4373
4374 ereadline
4374 ereadline
4375 ---------
4375 ---------
4376
4376
4377 Like ``readline``, but read from the stderr pipe, if available.
4377 Like ``readline``, but read from the stderr pipe, if available.
4378
4378
4379 read <X>
4379 read <X>
4380 --------
4380 --------
4381
4381
4382 ``read()`` N bytes from the server's main output pipe.
4382 ``read()`` N bytes from the server's main output pipe.
4383
4383
4384 eread <X>
4384 eread <X>
4385 ---------
4385 ---------
4386
4386
4387 ``read()`` N bytes from the server's stderr pipe, if available.
4387 ``read()`` N bytes from the server's stderr pipe, if available.
4388
4388
4389 Specifying Unified Frame-Based Protocol Frames
4389 Specifying Unified Frame-Based Protocol Frames
4390 ----------------------------------------------
4390 ----------------------------------------------
4391
4391
4392 It is possible to emit a *Unified Frame-Based Protocol* by using special
4392 It is possible to emit a *Unified Frame-Based Protocol* by using special
4393 syntax.
4393 syntax.
4394
4394
4395 A frame is composed as a type, flags, and payload. These can be parsed
4395 A frame is composed as a type, flags, and payload. These can be parsed
4396 from a string of the form:
4396 from a string of the form:
4397
4397
4398 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4398 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4399
4399
4400 ``request-id`` and ``stream-id`` are integers defining the request and
4400 ``request-id`` and ``stream-id`` are integers defining the request and
4401 stream identifiers.
4401 stream identifiers.
4402
4402
4403 ``type`` can be an integer value for the frame type or the string name
4403 ``type`` can be an integer value for the frame type or the string name
4404 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4404 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4405 ``command-name``.
4405 ``command-name``.
4406
4406
4407 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4407 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4408 components. Each component (and there can be just one) can be an integer
4408 components. Each component (and there can be just one) can be an integer
4409 or a flag name for stream flags or frame flags, respectively. Values are
4409 or a flag name for stream flags or frame flags, respectively. Values are
4410 resolved to integers and then bitwise OR'd together.
4410 resolved to integers and then bitwise OR'd together.
4411
4411
4412 ``payload`` represents the raw frame payload. If it begins with
4412 ``payload`` represents the raw frame payload. If it begins with
4413 ``cbor:``, the following string is evaluated as Python code and the
4413 ``cbor:``, the following string is evaluated as Python code and the
4414 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4414 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4415 as a Python byte string literal.
4415 as a Python byte string literal.
4416 """
4416 """
4417 if opts['localssh'] and not repo:
4417 if opts['localssh'] and not repo:
4418 raise error.Abort(_(b'--localssh requires a repository'))
4418 raise error.Abort(_(b'--localssh requires a repository'))
4419
4419
4420 if opts['peer'] and opts['peer'] not in (
4420 if opts['peer'] and opts['peer'] not in (
4421 b'raw',
4421 b'raw',
4422 b'ssh1',
4422 b'ssh1',
4423 ):
4423 ):
4424 raise error.Abort(
4424 raise error.Abort(
4425 _(b'invalid value for --peer'),
4425 _(b'invalid value for --peer'),
4426 hint=_(b'valid values are "raw" and "ssh1"'),
4426 hint=_(b'valid values are "raw" and "ssh1"'),
4427 )
4427 )
4428
4428
4429 if path and opts['localssh']:
4429 if path and opts['localssh']:
4430 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4430 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4431
4431
4432 if ui.interactive():
4432 if ui.interactive():
4433 ui.write(_(b'(waiting for commands on stdin)\n'))
4433 ui.write(_(b'(waiting for commands on stdin)\n'))
4434
4434
4435 blocks = list(_parsewirelangblocks(ui.fin))
4435 blocks = list(_parsewirelangblocks(ui.fin))
4436
4436
4437 proc = None
4437 proc = None
4438 stdin = None
4438 stdin = None
4439 stdout = None
4439 stdout = None
4440 stderr = None
4440 stderr = None
4441 opener = None
4441 opener = None
4442
4442
4443 if opts['localssh']:
4443 if opts['localssh']:
4444 # We start the SSH server in its own process so there is process
4444 # We start the SSH server in its own process so there is process
4445 # separation. This prevents a whole class of potential bugs around
4445 # separation. This prevents a whole class of potential bugs around
4446 # shared state from interfering with server operation.
4446 # shared state from interfering with server operation.
4447 args = procutil.hgcmd() + [
4447 args = procutil.hgcmd() + [
4448 b'-R',
4448 b'-R',
4449 repo.root,
4449 repo.root,
4450 b'debugserve',
4450 b'debugserve',
4451 b'--sshstdio',
4451 b'--sshstdio',
4452 ]
4452 ]
4453 proc = subprocess.Popen(
4453 proc = subprocess.Popen(
4454 pycompat.rapply(procutil.tonativestr, args),
4454 pycompat.rapply(procutil.tonativestr, args),
4455 stdin=subprocess.PIPE,
4455 stdin=subprocess.PIPE,
4456 stdout=subprocess.PIPE,
4456 stdout=subprocess.PIPE,
4457 stderr=subprocess.PIPE,
4457 stderr=subprocess.PIPE,
4458 bufsize=0,
4458 bufsize=0,
4459 )
4459 )
4460
4460
4461 stdin = proc.stdin
4461 stdin = proc.stdin
4462 stdout = proc.stdout
4462 stdout = proc.stdout
4463 stderr = proc.stderr
4463 stderr = proc.stderr
4464
4464
4465 # We turn the pipes into observers so we can log I/O.
4465 # We turn the pipes into observers so we can log I/O.
4466 if ui.verbose or opts['peer'] == b'raw':
4466 if ui.verbose or opts['peer'] == b'raw':
4467 stdin = util.makeloggingfileobject(
4467 stdin = util.makeloggingfileobject(
4468 ui, proc.stdin, b'i', logdata=True
4468 ui, proc.stdin, b'i', logdata=True
4469 )
4469 )
4470 stdout = util.makeloggingfileobject(
4470 stdout = util.makeloggingfileobject(
4471 ui, proc.stdout, b'o', logdata=True
4471 ui, proc.stdout, b'o', logdata=True
4472 )
4472 )
4473 stderr = util.makeloggingfileobject(
4473 stderr = util.makeloggingfileobject(
4474 ui, proc.stderr, b'e', logdata=True
4474 ui, proc.stderr, b'e', logdata=True
4475 )
4475 )
4476
4476
4477 # --localssh also implies the peer connection settings.
4477 # --localssh also implies the peer connection settings.
4478
4478
4479 url = b'ssh://localserver'
4479 url = b'ssh://localserver'
4480 autoreadstderr = not opts['noreadstderr']
4480 autoreadstderr = not opts['noreadstderr']
4481
4481
4482 if opts['peer'] == b'ssh1':
4482 if opts['peer'] == b'ssh1':
4483 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4483 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4484 peer = sshpeer.sshv1peer(
4484 peer = sshpeer.sshv1peer(
4485 ui,
4485 ui,
4486 url,
4486 url,
4487 proc,
4487 proc,
4488 stdin,
4488 stdin,
4489 stdout,
4489 stdout,
4490 stderr,
4490 stderr,
4491 None,
4491 None,
4492 autoreadstderr=autoreadstderr,
4492 autoreadstderr=autoreadstderr,
4493 )
4493 )
4494 elif opts['peer'] == b'raw':
4494 elif opts['peer'] == b'raw':
4495 ui.write(_(b'using raw connection to peer\n'))
4495 ui.write(_(b'using raw connection to peer\n'))
4496 peer = None
4496 peer = None
4497 else:
4497 else:
4498 ui.write(_(b'creating ssh peer from handshake results\n'))
4498 ui.write(_(b'creating ssh peer from handshake results\n'))
4499 peer = sshpeer._make_peer(
4499 peer = sshpeer._make_peer(
4500 ui,
4500 ui,
4501 url,
4501 url,
4502 proc,
4502 proc,
4503 stdin,
4503 stdin,
4504 stdout,
4504 stdout,
4505 stderr,
4505 stderr,
4506 autoreadstderr=autoreadstderr,
4506 autoreadstderr=autoreadstderr,
4507 )
4507 )
4508
4508
4509 elif path:
4509 elif path:
4510 # We bypass hg.peer() so we can proxy the sockets.
4510 # We bypass hg.peer() so we can proxy the sockets.
4511 # TODO consider not doing this because we skip
4511 # TODO consider not doing this because we skip
4512 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4512 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4513 u = urlutil.url(path)
4513 u = urlutil.url(path)
4514 if u.scheme not in (b'http', b'https'):
4514 if u.scheme not in (b'http', b'https'):
4515 raise error.Abort(
4515 raise error.Abort(
4516 _(b'only http:// and https:// paths are currently supported')
4516 _(b'only http:// and https:// paths are currently supported')
4517 )
4517 )
4518
4518
4519 url, authinfo = u.authinfo()
4519 url, authinfo = u.authinfo()
4520 openerargs = {
4520 openerargs = {
4521 'useragent': b'Mercurial debugwireproto',
4521 'useragent': b'Mercurial debugwireproto',
4522 }
4522 }
4523
4523
4524 # Turn pipes/sockets into observers so we can log I/O.
4524 # Turn pipes/sockets into observers so we can log I/O.
4525 if ui.verbose:
4525 if ui.verbose:
4526 openerargs.update(
4526 openerargs.update(
4527 {
4527 {
4528 'loggingfh': ui,
4528 'loggingfh': ui,
4529 'loggingname': b's',
4529 'loggingname': b's',
4530 'loggingopts': {
4530 'loggingopts': {
4531 'logdata': True,
4531 'logdata': True,
4532 'logdataapis': False,
4532 'logdataapis': False,
4533 },
4533 },
4534 }
4534 }
4535 )
4535 )
4536
4536
4537 if ui.debugflag:
4537 if ui.debugflag:
4538 openerargs['loggingopts']['logdataapis'] = True
4538 openerargs['loggingopts']['logdataapis'] = True
4539
4539
4540 # Don't send default headers when in raw mode. This allows us to
4540 # Don't send default headers when in raw mode. This allows us to
4541 # bypass most of the behavior of our URL handling code so we can
4541 # bypass most of the behavior of our URL handling code so we can
4542 # have near complete control over what's sent on the wire.
4542 # have near complete control over what's sent on the wire.
4543 if opts['peer'] == b'raw':
4543 if opts['peer'] == b'raw':
4544 openerargs['sendaccept'] = False
4544 openerargs['sendaccept'] = False
4545
4545
4546 opener = urlmod.opener(ui, authinfo, **openerargs)
4546 opener = urlmod.opener(ui, authinfo, **openerargs)
4547
4547
4548 if opts['peer'] == b'raw':
4548 if opts['peer'] == b'raw':
4549 ui.write(_(b'using raw connection to peer\n'))
4549 ui.write(_(b'using raw connection to peer\n'))
4550 peer = None
4550 peer = None
4551 elif opts['peer']:
4551 elif opts['peer']:
4552 raise error.Abort(
4552 raise error.Abort(
4553 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4553 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4554 )
4554 )
4555 else:
4555 else:
4556 peer_path = urlutil.try_path(ui, path)
4556 peer_path = urlutil.try_path(ui, path)
4557 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4557 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4558
4558
4559 # We /could/ populate stdin/stdout with sock.makefile()...
4559 # We /could/ populate stdin/stdout with sock.makefile()...
4560 else:
4560 else:
4561 raise error.Abort(_(b'unsupported connection configuration'))
4561 raise error.Abort(_(b'unsupported connection configuration'))
4562
4562
4563 batchedcommands = None
4563 batchedcommands = None
4564
4564
4565 # Now perform actions based on the parsed wire language instructions.
4565 # Now perform actions based on the parsed wire language instructions.
4566 for action, lines in blocks:
4566 for action, lines in blocks:
4567 if action in (b'raw', b'raw+'):
4567 if action in (b'raw', b'raw+'):
4568 if not stdin:
4568 if not stdin:
4569 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4569 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4570
4570
4571 # Concatenate the data together.
4571 # Concatenate the data together.
4572 data = b''.join(l.lstrip() for l in lines)
4572 data = b''.join(l.lstrip() for l in lines)
4573 data = stringutil.unescapestr(data)
4573 data = stringutil.unescapestr(data)
4574 stdin.write(data)
4574 stdin.write(data)
4575
4575
4576 if action == b'raw+':
4576 if action == b'raw+':
4577 stdin.flush()
4577 stdin.flush()
4578 elif action == b'flush':
4578 elif action == b'flush':
4579 if not stdin:
4579 if not stdin:
4580 raise error.Abort(_(b'cannot call flush on this peer'))
4580 raise error.Abort(_(b'cannot call flush on this peer'))
4581 stdin.flush()
4581 stdin.flush()
4582 elif action.startswith(b'command'):
4582 elif action.startswith(b'command'):
4583 if not peer:
4583 if not peer:
4584 raise error.Abort(
4584 raise error.Abort(
4585 _(
4585 _(
4586 b'cannot send commands unless peer instance '
4586 b'cannot send commands unless peer instance '
4587 b'is available'
4587 b'is available'
4588 )
4588 )
4589 )
4589 )
4590
4590
4591 command = action.split(b' ', 1)[1]
4591 command = action.split(b' ', 1)[1]
4592
4592
4593 args = {}
4593 args = {}
4594 for line in lines:
4594 for line in lines:
4595 # We need to allow empty values.
4595 # We need to allow empty values.
4596 fields = line.lstrip().split(b' ', 1)
4596 fields = line.lstrip().split(b' ', 1)
4597 if len(fields) == 1:
4597 if len(fields) == 1:
4598 key = fields[0]
4598 key = fields[0]
4599 value = b''
4599 value = b''
4600 else:
4600 else:
4601 key, value = fields
4601 key, value = fields
4602
4602
4603 if value.startswith(b'eval:'):
4603 if value.startswith(b'eval:'):
4604 value = stringutil.evalpythonliteral(value[5:])
4604 value = stringutil.evalpythonliteral(value[5:])
4605 else:
4605 else:
4606 value = stringutil.unescapestr(value)
4606 value = stringutil.unescapestr(value)
4607
4607
4608 args[key] = value
4608 args[key] = value
4609
4609
4610 if batchedcommands is not None:
4610 if batchedcommands is not None:
4611 batchedcommands.append((command, args))
4611 batchedcommands.append((command, args))
4612 continue
4612 continue
4613
4613
4614 ui.status(_(b'sending %s command\n') % command)
4614 ui.status(_(b'sending %s command\n') % command)
4615
4615
4616 if b'PUSHFILE' in args:
4616 if b'PUSHFILE' in args:
4617 with open(args[b'PUSHFILE'], 'rb') as fh:
4617 with open(args[b'PUSHFILE'], 'rb') as fh:
4618 del args[b'PUSHFILE']
4618 del args[b'PUSHFILE']
4619 res, output = peer._callpush(
4619 res, output = peer._callpush(
4620 command, fh, **pycompat.strkwargs(args)
4620 command, fh, **pycompat.strkwargs(args)
4621 )
4621 )
4622 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4622 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4623 ui.status(
4623 ui.status(
4624 _(b'remote output: %s\n') % stringutil.escapestr(output)
4624 _(b'remote output: %s\n') % stringutil.escapestr(output)
4625 )
4625 )
4626 else:
4626 else:
4627 with peer.commandexecutor() as e:
4627 with peer.commandexecutor() as e:
4628 res = e.callcommand(command, args).result()
4628 res = e.callcommand(command, args).result()
4629
4629
4630 ui.status(
4630 ui.status(
4631 _(b'response: %s\n')
4631 _(b'response: %s\n')
4632 % stringutil.pprint(res, bprefix=True, indent=2)
4632 % stringutil.pprint(res, bprefix=True, indent=2)
4633 )
4633 )
4634
4634
4635 elif action == b'batchbegin':
4635 elif action == b'batchbegin':
4636 if batchedcommands is not None:
4636 if batchedcommands is not None:
4637 raise error.Abort(_(b'nested batchbegin not allowed'))
4637 raise error.Abort(_(b'nested batchbegin not allowed'))
4638
4638
4639 batchedcommands = []
4639 batchedcommands = []
4640 elif action == b'batchsubmit':
4640 elif action == b'batchsubmit':
4641 # There is a batching API we could go through. But it would be
4641 # There is a batching API we could go through. But it would be
4642 # difficult to normalize requests into function calls. It is easier
4642 # difficult to normalize requests into function calls. It is easier
4643 # to bypass this layer and normalize to commands + args.
4643 # to bypass this layer and normalize to commands + args.
4644 ui.status(
4644 ui.status(
4645 _(b'sending batch with %d sub-commands\n')
4645 _(b'sending batch with %d sub-commands\n')
4646 % len(batchedcommands)
4646 % len(batchedcommands)
4647 )
4647 )
4648 assert peer is not None
4648 assert peer is not None
4649 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4649 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4650 ui.status(
4650 ui.status(
4651 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4651 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4652 )
4652 )
4653
4653
4654 batchedcommands = None
4654 batchedcommands = None
4655
4655
4656 elif action.startswith(b'httprequest '):
4656 elif action.startswith(b'httprequest '):
4657 if not opener:
4657 if not opener:
4658 raise error.Abort(
4658 raise error.Abort(
4659 _(b'cannot use httprequest without an HTTP peer')
4659 _(b'cannot use httprequest without an HTTP peer')
4660 )
4660 )
4661
4661
4662 request = action.split(b' ', 2)
4662 request = action.split(b' ', 2)
4663 if len(request) != 3:
4663 if len(request) != 3:
4664 raise error.Abort(
4664 raise error.Abort(
4665 _(
4665 _(
4666 b'invalid httprequest: expected format is '
4666 b'invalid httprequest: expected format is '
4667 b'"httprequest <method> <path>'
4667 b'"httprequest <method> <path>'
4668 )
4668 )
4669 )
4669 )
4670
4670
4671 method, httppath = request[1:]
4671 method, httppath = request[1:]
4672 headers = {}
4672 headers = {}
4673 body = None
4673 body = None
4674 frames = []
4674 frames = []
4675 for line in lines:
4675 for line in lines:
4676 line = line.lstrip()
4676 line = line.lstrip()
4677 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4677 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4678 if m:
4678 if m:
4679 # Headers need to use native strings.
4679 # Headers need to use native strings.
4680 key = pycompat.strurl(m.group(1))
4680 key = pycompat.strurl(m.group(1))
4681 value = pycompat.strurl(m.group(2))
4681 value = pycompat.strurl(m.group(2))
4682 headers[key] = value
4682 headers[key] = value
4683 continue
4683 continue
4684
4684
4685 if line.startswith(b'BODYFILE '):
4685 if line.startswith(b'BODYFILE '):
4686 with open(line.split(b' ', 1), b'rb') as fh:
4686 with open(line.split(b' ', 1), b'rb') as fh:
4687 body = fh.read()
4687 body = fh.read()
4688 elif line.startswith(b'frame '):
4688 elif line.startswith(b'frame '):
4689 frame = wireprotoframing.makeframefromhumanstring(
4689 frame = wireprotoframing.makeframefromhumanstring(
4690 line[len(b'frame ') :]
4690 line[len(b'frame ') :]
4691 )
4691 )
4692
4692
4693 frames.append(frame)
4693 frames.append(frame)
4694 else:
4694 else:
4695 raise error.Abort(
4695 raise error.Abort(
4696 _(b'unknown argument to httprequest: %s') % line
4696 _(b'unknown argument to httprequest: %s') % line
4697 )
4697 )
4698
4698
4699 url = path + httppath
4699 url = path + httppath
4700
4700
4701 if frames:
4701 if frames:
4702 body = b''.join(bytes(f) for f in frames)
4702 body = b''.join(bytes(f) for f in frames)
4703
4703
4704 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4704 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4705
4705
4706 # urllib.Request insists on using has_data() as a proxy for
4706 # urllib.Request insists on using has_data() as a proxy for
4707 # determining the request method. Override that to use our
4707 # determining the request method. Override that to use our
4708 # explicitly requested method.
4708 # explicitly requested method.
4709 req.get_method = lambda: pycompat.sysstr(method)
4709 req.get_method = lambda: pycompat.sysstr(method)
4710
4710
4711 try:
4711 try:
4712 res = opener.open(req)
4712 res = opener.open(req)
4713 body = res.read()
4713 body = res.read()
4714 except util.urlerr.urlerror as e:
4714 except util.urlerr.urlerror as e:
4715 # read() method must be called, but only exists in Python 2
4715 # read() method must be called, but only exists in Python 2
4716 getattr(e, 'read', lambda: None)()
4716 getattr(e, 'read', lambda: None)()
4717 continue
4717 continue
4718
4718
4719 ct = res.headers.get('Content-Type')
4719 ct = res.headers.get('Content-Type')
4720 if ct == 'application/mercurial-cbor':
4720 if ct == 'application/mercurial-cbor':
4721 ui.write(
4721 ui.write(
4722 _(b'cbor> %s\n')
4722 _(b'cbor> %s\n')
4723 % stringutil.pprint(
4723 % stringutil.pprint(
4724 cborutil.decodeall(body), bprefix=True, indent=2
4724 cborutil.decodeall(body), bprefix=True, indent=2
4725 )
4725 )
4726 )
4726 )
4727
4727
4728 elif action == b'close':
4728 elif action == b'close':
4729 assert peer is not None
4729 assert peer is not None
4730 peer.close()
4730 peer.close()
4731 elif action == b'readavailable':
4731 elif action == b'readavailable':
4732 if not stdout or not stderr:
4732 if not stdout or not stderr:
4733 raise error.Abort(
4733 raise error.Abort(
4734 _(b'readavailable not available on this peer')
4734 _(b'readavailable not available on this peer')
4735 )
4735 )
4736
4736
4737 stdin.close()
4737 stdin.close()
4738 stdout.read()
4738 stdout.read()
4739 stderr.read()
4739 stderr.read()
4740
4740
4741 elif action == b'readline':
4741 elif action == b'readline':
4742 if not stdout:
4742 if not stdout:
4743 raise error.Abort(_(b'readline not available on this peer'))
4743 raise error.Abort(_(b'readline not available on this peer'))
4744 stdout.readline()
4744 stdout.readline()
4745 elif action == b'ereadline':
4745 elif action == b'ereadline':
4746 if not stderr:
4746 if not stderr:
4747 raise error.Abort(_(b'ereadline not available on this peer'))
4747 raise error.Abort(_(b'ereadline not available on this peer'))
4748 stderr.readline()
4748 stderr.readline()
4749 elif action.startswith(b'read '):
4749 elif action.startswith(b'read '):
4750 count = int(action.split(b' ', 1)[1])
4750 count = int(action.split(b' ', 1)[1])
4751 if not stdout:
4751 if not stdout:
4752 raise error.Abort(_(b'read not available on this peer'))
4752 raise error.Abort(_(b'read not available on this peer'))
4753 stdout.read(count)
4753 stdout.read(count)
4754 elif action.startswith(b'eread '):
4754 elif action.startswith(b'eread '):
4755 count = int(action.split(b' ', 1)[1])
4755 count = int(action.split(b' ', 1)[1])
4756 if not stderr:
4756 if not stderr:
4757 raise error.Abort(_(b'eread not available on this peer'))
4757 raise error.Abort(_(b'eread not available on this peer'))
4758 stderr.read(count)
4758 stderr.read(count)
4759 else:
4759 else:
4760 raise error.Abort(_(b'unknown action: %s') % action)
4760 raise error.Abort(_(b'unknown action: %s') % action)
4761
4761
4762 if batchedcommands is not None:
4762 if batchedcommands is not None:
4763 raise error.Abort(_(b'unclosed "batchbegin" request'))
4763 raise error.Abort(_(b'unclosed "batchbegin" request'))
4764
4764
4765 if peer:
4765 if peer:
4766 peer.close()
4766 peer.close()
4767
4767
4768 if proc:
4768 if proc:
4769 proc.kill()
4769 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now