# Scraped from a code-review page; original provenance:
#   Mercurial changeset r50623:5177be2b (default branch), by marmoute
#   "path: pass `path` to `peer` in `hg debugbackupbundle`"
# File: mercurial/debugcommands.py (partial view, lines 1-634 of 4715)
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 requirements,
73 requirements,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 vfs as vfsmod,
90 vfs as vfsmod,
91 wireprotoframing,
91 wireprotoframing,
92 wireprotoserver,
92 wireprotoserver,
93 )
93 )
94 from .interfaces import repository
94 from .interfaces import repository
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 urlutil,
101 urlutil,
102 )
102 )
103
103
104 from .revlogutils import (
104 from .revlogutils import (
105 constants as revlog_constants,
105 constants as revlog_constants,
106 debug as revlog_debug,
106 debug as revlog_debug,
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
# Convenience alias so debug commands can release lock lists directly.
release = lockmod.release

# Command table for this module.  It is seeded with the commands of the
# strip extension so `hg debugstrip` & co. are registered alongside the
# debug* commands defined below, then `command` registers into it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
118
118
119
119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments: an explicit revlog index file plus the two revs.
    # Two arguments: use the changelog of the current repository.
    nargs = len(args)
    if nargs == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancnode), hex(ancnode)))
139
139
140
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial vfs paths are bytes; the filename was a str literal, which
    # fails when joined against the bytes vfs base on Python 3 (the mode
    # argument on the same call is already bytes).  Use bytes literals.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
156
156
157
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (handles local files and URLs alike), parse it,
    # and replay its contents into the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164
164
165
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count node events ('n') so the
    # progress bar below has an accurate total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Take both locks and a single transaction for the whole build so the
    # generated history is committed atomically.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # nodeids[i] is the commit node for DAG id i
        id = 0
        progress.update(id)
        # Second pass: actually create commits/tags/branch switches.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data  # ps: list of parent ids (backrefs resolved)

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge "mf" from both parents
                        # against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Stamp this revision's id onto its dedicated line so
                    # each rev changes a distinct, mergeable region.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One fresh file per revision; on merges also carry over
                    # the other parent's "nf*" files.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents from the dict
                    # built above; None means "file absent".
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag for an already-created node.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Switch named branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags are written outside the transaction; they live in .hg
    # rather than in history.
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
349
349
350
350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup `gen`.

    With `all`, every delta of every section (changelog, manifest, each
    filelog) is listed in full; otherwise only changelog node hashes are
    shown.  `indent` left-pads each output line (used when nested inside
    bundle2 part output).  NOTE: `gen` is consumed by this call.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header, then every delta of the current
            # section.  deltaiter() yields until the section's end marker,
            # so the sections below must be read in stream order.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Stream order is fixed: changelog, manifest, then one header per
        # filelog until an empty header ({}) terminates the iteration.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390
390
391
391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    payload = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(payload)
    except error.UnknownVersion as exc:
        # Unknown on-the-wire format: report it and bail out.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= pad, exc.version, len(payload)
        ui.write(msg)
        return
    msg = b"%sversion: %d (%d bytes)\n"
    msg %= pad, version, len(payload)
    ui.write(msg)
    # Render each marker through the standard formatter, sorted for
    # deterministic output.
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
414
414
415
415
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    # Decode the binary phase-heads payload into {phase: [head nodes]}.
    byphase = phases.binarydecode(data)
    for phase in phases.allphases:
        name = phases.phasenames[phase]
        for head in byphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), name))
424
424
425
425
def _quasirepr(thing):
    """Byte-string repr of `thing`; mappings are rendered with sorted keys
    so the output is deterministic."""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % (b', '.join(pairs))
432
432
433
433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # part.type is a single value, so at most one branch applies.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
456
456
457
457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: print the bundle specification string and stop.
        if spec:
            spec = exchange.getbundlespec(ui, fh)
            ui.write(b'%s\n' % spec)
            return

        unbundler = exchange.readbundle(ui, fh, bundlepath)
        # bundle2 gets its own structured dump; anything else is a plain
        # changegroup.
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
480
480
481
481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Plain wire-protocol capabilities first.
        ui.writenoi18n(b'Main capabilities:\n')
        for capability in sorted(peer.capabilities()):
            ui.write(b' %s\n' % capability)
        # Then the decoded bundle2 capability map, if the peer has one.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # Always release the peer connection, even on error.
        peer.close()
501
501
502
502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Either recompute the changed-files info from the changeset, or pull
    # it out of the changelog's sidedata (None when the block is absent).
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    for f in sorted(files.touched):
        # First matching category wins; "touched" is the catch-all.
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # Copy metadata: which parent the file was copied from, and the
        # source path (both empty when the file was not copied).
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        template = b"%-8s %2s: %s, %s;\n"
        ui.write(template % (action, copy_parent, f, copy_source))
552
552
553
553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    manifest1 = repo[parent1].manifest()
    manifest2 = repo[parent2].manifest()
    # Report every inconsistency, then abort once if anything was wrong.
    errcount = 0
    for err in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
567
567
568
568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
581
581
582
582
def _debugdisplaycolor(ui):
    """Print every color/effect available in the current color mode."""
    # Work on a copy so rewriting the style table doesn't affect callers.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: custom colors/effects come from [color] config.
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda item: (b'_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
599
599
600
600
def _debugdisplaystyle(ui):
    """Print each configured style label with its effects, rendered."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Column width so the effect lists line up across labels.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
614
614
615
615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # Stream bundles do not filter out secret changesets; warn instead of
    # aborting.  TODO we may want to turn this into an abort when this
    # functionality is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    formatted = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % formatted)
637
637
638
638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index file: emit its DAG directly.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # One 'n' (node) event per revision; an 'l' (label) event for
            # every revision explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision number to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event each time the branch
                    # recorded in the changeset extra changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Serialize the event stream into dagtext lines, wrapping long runs.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
708
708
709
709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the single positional
    # argument is the revision rather than a file path.
    implied_storage = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if implied_storage:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725
725
726
726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e, also accept the more permissive extended date formats.
    parsed = (
        dateutil.parsedate(date, dateutil.extendeddateformats)
        if opts["extended"]
        else dateutil.parsedate(date)
    )
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745
745
746
746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics; returns
        # (p1, p2, compsize, uncompsize, deltatype, chain, chainsize).
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta base; order matters (p1/p2 take precedence
        # over skip1/skip2, which take precedence over snapshots).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, a delta is always against the previous
            # revision (or the revision is itself a full snapshot).
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Column header; widths mirror the %-format string used below.
    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chain ids are attributed in order of first appearance of a chain base.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: total bytes read, largest single
            # block, and number of distinct read operations for this chain.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
987
987
988
988
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument means "REV of the storage selected with -c/-m";
    # two mean "FILE REV".
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # Pass this command's own name so openrevlog error messages point at the
    # right command (the previous code passed b'debugdeltachain').  Also use
    # a local name that does not shadow the module-level `revlog` import.
    rlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)
    p1r, p2r = rlog.parentrevs(rev)

    # Map --source to the delta base revision fed to the search.
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rlog, rev, base_rev=base_rev)
1046
1046
1047
1047
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (metadata file) instead of
        # the entries themselves.  Only dirstate-v2 has a docket.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates defaults to True; the deprecated --nodates flag overrides it.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is given.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime == -1 means "unset".  The literal placeholders below are
        # padded to the width of the strftime output so columns align.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored mode.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1135
1135
1136
1136
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records a hash of the ignore patterns: it is stored
    # as the trailing bytes of the docket's tree metadata.  For dirstate-v1
    # the command intentionally prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1151
1151
1152
1152
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If random sampling during discovery are deterministic. It is meant for
      integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # real remote: resolve the URL/path and open a peer to it
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # simulated remote: use the local repo filtered down to the
        # requested revisions via a dynamically-registered repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # same trick to restrict the *local* side to a subset of revisions
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit/statistics entries filled in both by the
    # discovery implementations (via audit=data) and by this function.
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads, as node ids
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output (e.g. JSON): capture any stray textual
        # output into the data dict instead of interleaving it

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # "only nullid in common" means nothing is actually common
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing partition the whole repo by construction
    assert len(common) + len(missing) == len(all)

    # the set discovery started from: everything not decided by the
    # initial exchange of heads
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    # the per-phase round-trip/query counters below are only present when
    # the discovery implementation recorded them in `data`
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads:   %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)8d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches:    %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between:     %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1434
1434
1435
1435
# buffer size (4 KiB) used by debugdownload when streaming a resource
_chunksize = 4 << 10
1437
1437
1438
1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched through Mercurial's URL-opening
    machinery (honoring proxy/auth configuration) and streamed either to
    the terminal (default) or to the file named by ``--output``.
    """
    fh = urlmod.open(ui, url, output)

    # fix: previously the handle returned by urlmod.open was never
    # closed, leaking the underlying connection/file; close it in a
    # finally so it is released even when opening the output file or
    # writing to it fails.
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to bound memory usage
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        fh.close()
1461
1461
1462
1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions in stable (name-sorted) order
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from; frozen/oxidized
        # builds have no __file__, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with compatibility status at default
            # verbosity: nothing for internal/tested, a warning otherwise
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1524
1524
1525
1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline of parse-tree transformation stages; each may be printed
    # via --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate requested stage names before running anything
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through every stage, printing the tree at each
    # requested point
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file set the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # working-directory context: also walk unknown/ignored files
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally print every candidate file accepted by the matcher
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1621
1621
1622
1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report produces a report, so it cannot be combined with options
    # that consume one or that would skip the actual computation.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the heavy lifting (detection + rewrite) lives in mercurial.utils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1695
1695
1696
1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # "<name>:" left-padded out to the shared column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes pass through unchanged; everything else renders yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # name and repo columns share the same highlight state, so compute
        # the common label suffix once
        if repovalue != configvalue:
            suffix = b'.mismatchconfig'
        elif repovalue != fv.default:
            suffix = b'.mismatchdefault'
        else:
            suffix = b'.uptodate'
        namelabel = b'formatvariant.name' + suffix
        repolabel = b'formatvariant.repo' + suffix

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1767
1767
1768
1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem

    Probes the filesystem containing ``path`` (default: the current
    directory) and prints its mount point, fstype, and whether it supports
    exec bits, symlinks, hardlinks and case sensitivity.
    """
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    # conditional expressions replace the old `cond and a or b` idiom
    ui.writenoi18n(b'exec: %s\n' % (b'yes' if util.checkexec(path) else b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (b'yes' if util.checklink(path) else b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (b'yes' if util.checknlink(path) else b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # probe by creating a temp file; fails (e.g. read-only fs) -> unknown
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = b'yes' if util.fscasesensitive(f.name) else b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1791
1791
1792
1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing --type value to an on-disk bundle type tag
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1839
1839
1840
1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined matcher itself
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                ignored = nf
            else:
                # not ignored directly; check whether a parent directory is
                ignored = next(
                    (p for p in pathutil.finddirs(nf) if ignore(p)), None
                )
            if ignored is not None:
                ignoredata = repo.dirstate._ignorefileandline(ignored)

        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue

        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1889
1889
1890
1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    storage = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fmt = ui.formatter(b'debugindex', opts)
    # unwrap storage objects (e.g. filelogs) down to their raw revlog
    target = getattr(storage, b'_revlog', storage)
    return revlog_debug.debug_index(
        ui, repo, formatter=fmt, revlog=target, full_node=ui.debugflag
    )
1912
1912
1913
1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # one edge per real parent; null second parents are omitted
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1932
1932
1933
1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be fully loaded/parsed before asking for stats
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1943
1943
1944
1944
1945 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1945 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1946 def debuginstall(ui, **opts):
1946 def debuginstall(ui, **opts):
1947 """test Mercurial installation
1947 """test Mercurial installation
1948
1948
1949 Returns 0 on success.
1949 Returns 0 on success.
1950 """
1950 """
1951 opts = pycompat.byteskwargs(opts)
1951 opts = pycompat.byteskwargs(opts)
1952
1952
1953 problems = 0
1953 problems = 0
1954
1954
1955 fm = ui.formatter(b'debuginstall', opts)
1955 fm = ui.formatter(b'debuginstall', opts)
1956 fm.startitem()
1956 fm.startitem()
1957
1957
1958 # encoding might be unknown or wrong. don't translate these messages.
1958 # encoding might be unknown or wrong. don't translate these messages.
1959 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1959 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1960 err = None
1960 err = None
1961 try:
1961 try:
1962 codecs.lookup(pycompat.sysstr(encoding.encoding))
1962 codecs.lookup(pycompat.sysstr(encoding.encoding))
1963 except LookupError as inst:
1963 except LookupError as inst:
1964 err = stringutil.forcebytestr(inst)
1964 err = stringutil.forcebytestr(inst)
1965 problems += 1
1965 problems += 1
1966 fm.condwrite(
1966 fm.condwrite(
1967 err,
1967 err,
1968 b'encodingerror',
1968 b'encodingerror',
1969 b" %s\n (check that your locale is properly set)\n",
1969 b" %s\n (check that your locale is properly set)\n",
1970 err,
1970 err,
1971 )
1971 )
1972
1972
1973 # Python
1973 # Python
1974 pythonlib = None
1974 pythonlib = None
1975 if util.safehasattr(os, '__file__'):
1975 if util.safehasattr(os, '__file__'):
1976 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1976 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1977 elif getattr(sys, 'oxidized', False):
1977 elif getattr(sys, 'oxidized', False):
1978 pythonlib = pycompat.sysexecutable
1978 pythonlib = pycompat.sysexecutable
1979
1979
1980 fm.write(
1980 fm.write(
1981 b'pythonexe',
1981 b'pythonexe',
1982 _(b"checking Python executable (%s)\n"),
1982 _(b"checking Python executable (%s)\n"),
1983 pycompat.sysexecutable or _(b"unknown"),
1983 pycompat.sysexecutable or _(b"unknown"),
1984 )
1984 )
1985 fm.write(
1985 fm.write(
1986 b'pythonimplementation',
1986 b'pythonimplementation',
1987 _(b"checking Python implementation (%s)\n"),
1987 _(b"checking Python implementation (%s)\n"),
1988 pycompat.sysbytes(platform.python_implementation()),
1988 pycompat.sysbytes(platform.python_implementation()),
1989 )
1989 )
1990 fm.write(
1990 fm.write(
1991 b'pythonver',
1991 b'pythonver',
1992 _(b"checking Python version (%s)\n"),
1992 _(b"checking Python version (%s)\n"),
1993 (b"%d.%d.%d" % sys.version_info[:3]),
1993 (b"%d.%d.%d" % sys.version_info[:3]),
1994 )
1994 )
1995 fm.write(
1995 fm.write(
1996 b'pythonlib',
1996 b'pythonlib',
1997 _(b"checking Python lib (%s)...\n"),
1997 _(b"checking Python lib (%s)...\n"),
1998 pythonlib or _(b"unknown"),
1998 pythonlib or _(b"unknown"),
1999 )
1999 )
2000
2000
2001 try:
2001 try:
2002 from . import rustext # pytype: disable=import-error
2002 from . import rustext # pytype: disable=import-error
2003
2003
2004 rustext.__doc__ # trigger lazy import
2004 rustext.__doc__ # trigger lazy import
2005 except ImportError:
2005 except ImportError:
2006 rustext = None
2006 rustext = None
2007
2007
2008 security = set(sslutil.supportedprotocols)
2008 security = set(sslutil.supportedprotocols)
2009 if sslutil.hassni:
2009 if sslutil.hassni:
2010 security.add(b'sni')
2010 security.add(b'sni')
2011
2011
2012 fm.write(
2012 fm.write(
2013 b'pythonsecurity',
2013 b'pythonsecurity',
2014 _(b"checking Python security support (%s)\n"),
2014 _(b"checking Python security support (%s)\n"),
2015 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2015 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2016 )
2016 )
2017
2017
2018 # These are warnings, not errors. So don't increment problem count. This
2018 # These are warnings, not errors. So don't increment problem count. This
2019 # may change in the future.
2019 # may change in the future.
2020 if b'tls1.2' not in security:
2020 if b'tls1.2' not in security:
2021 fm.plain(
2021 fm.plain(
2022 _(
2022 _(
2023 b' TLS 1.2 not supported by Python install; '
2023 b' TLS 1.2 not supported by Python install; '
2024 b'network connections lack modern security\n'
2024 b'network connections lack modern security\n'
2025 )
2025 )
2026 )
2026 )
2027 if b'sni' not in security:
2027 if b'sni' not in security:
2028 fm.plain(
2028 fm.plain(
2029 _(
2029 _(
2030 b' SNI not supported by Python install; may have '
2030 b' SNI not supported by Python install; may have '
2031 b'connectivity issues with some servers\n'
2031 b'connectivity issues with some servers\n'
2032 )
2032 )
2033 )
2033 )
2034
2034
2035 fm.plain(
2035 fm.plain(
2036 _(
2036 _(
2037 b"checking Rust extensions (%s)\n"
2037 b"checking Rust extensions (%s)\n"
2038 % (b'missing' if rustext is None else b'installed')
2038 % (b'missing' if rustext is None else b'installed')
2039 ),
2039 ),
2040 )
2040 )
2041
2041
2042 # TODO print CA cert info
2042 # TODO print CA cert info
2043
2043
2044 # hg version
2044 # hg version
2045 hgver = util.version()
2045 hgver = util.version()
2046 fm.write(
2046 fm.write(
2047 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2047 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2048 )
2048 )
2049 fm.write(
2049 fm.write(
2050 b'hgverextra',
2050 b'hgverextra',
2051 _(b"checking Mercurial custom build (%s)\n"),
2051 _(b"checking Mercurial custom build (%s)\n"),
2052 b'+'.join(hgver.split(b'+')[1:]),
2052 b'+'.join(hgver.split(b'+')[1:]),
2053 )
2053 )
2054
2054
2055 # compiled modules
2055 # compiled modules
2056 hgmodules = None
2056 hgmodules = None
2057 if util.safehasattr(sys.modules[__name__], '__file__'):
2057 if util.safehasattr(sys.modules[__name__], '__file__'):
2058 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2058 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2059 elif getattr(sys, 'oxidized', False):
2059 elif getattr(sys, 'oxidized', False):
2060 hgmodules = pycompat.sysexecutable
2060 hgmodules = pycompat.sysexecutable
2061
2061
2062 fm.write(
2062 fm.write(
2063 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2063 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2064 )
2064 )
2065 fm.write(
2065 fm.write(
2066 b'hgmodules',
2066 b'hgmodules',
2067 _(b"checking installed modules (%s)...\n"),
2067 _(b"checking installed modules (%s)...\n"),
2068 hgmodules or _(b"unknown"),
2068 hgmodules or _(b"unknown"),
2069 )
2069 )
2070
2070
2071 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2071 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2072 rustext = rustandc # for now, that's the only case
2072 rustext = rustandc # for now, that's the only case
2073 cext = policy.policy in (b'c', b'allow') or rustandc
2073 cext = policy.policy in (b'c', b'allow') or rustandc
2074 nopure = cext or rustext
2074 nopure = cext or rustext
2075 if nopure:
2075 if nopure:
2076 err = None
2076 err = None
2077 try:
2077 try:
2078 if cext:
2078 if cext:
2079 from .cext import ( # pytype: disable=import-error
2079 from .cext import ( # pytype: disable=import-error
2080 base85,
2080 base85,
2081 bdiff,
2081 bdiff,
2082 mpatch,
2082 mpatch,
2083 osutil,
2083 osutil,
2084 )
2084 )
2085
2085
2086 # quiet pyflakes
2086 # quiet pyflakes
2087 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2087 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2088 if rustext:
2088 if rustext:
2089 from .rustext import ( # pytype: disable=import-error
2089 from .rustext import ( # pytype: disable=import-error
2090 ancestor,
2090 ancestor,
2091 dirstate,
2091 dirstate,
2092 )
2092 )
2093
2093
2094 dir(ancestor), dir(dirstate) # quiet pyflakes
2094 dir(ancestor), dir(dirstate) # quiet pyflakes
2095 except Exception as inst:
2095 except Exception as inst:
2096 err = stringutil.forcebytestr(inst)
2096 err = stringutil.forcebytestr(inst)
2097 problems += 1
2097 problems += 1
2098 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2098 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2099
2099
2100 compengines = util.compengines._engines.values()
2100 compengines = util.compengines._engines.values()
2101 fm.write(
2101 fm.write(
2102 b'compengines',
2102 b'compengines',
2103 _(b'checking registered compression engines (%s)\n'),
2103 _(b'checking registered compression engines (%s)\n'),
2104 fm.formatlist(
2104 fm.formatlist(
2105 sorted(e.name() for e in compengines),
2105 sorted(e.name() for e in compengines),
2106 name=b'compengine',
2106 name=b'compengine',
2107 fmt=b'%s',
2107 fmt=b'%s',
2108 sep=b', ',
2108 sep=b', ',
2109 ),
2109 ),
2110 )
2110 )
2111 fm.write(
2111 fm.write(
2112 b'compenginesavail',
2112 b'compenginesavail',
2113 _(b'checking available compression engines (%s)\n'),
2113 _(b'checking available compression engines (%s)\n'),
2114 fm.formatlist(
2114 fm.formatlist(
2115 sorted(e.name() for e in compengines if e.available()),
2115 sorted(e.name() for e in compengines if e.available()),
2116 name=b'compengine',
2116 name=b'compengine',
2117 fmt=b'%s',
2117 fmt=b'%s',
2118 sep=b', ',
2118 sep=b', ',
2119 ),
2119 ),
2120 )
2120 )
2121 wirecompengines = compression.compengines.supportedwireengines(
2121 wirecompengines = compression.compengines.supportedwireengines(
2122 compression.SERVERROLE
2122 compression.SERVERROLE
2123 )
2123 )
2124 fm.write(
2124 fm.write(
2125 b'compenginesserver',
2125 b'compenginesserver',
2126 _(
2126 _(
2127 b'checking available compression engines '
2127 b'checking available compression engines '
2128 b'for wire protocol (%s)\n'
2128 b'for wire protocol (%s)\n'
2129 ),
2129 ),
2130 fm.formatlist(
2130 fm.formatlist(
2131 [e.name() for e in wirecompengines if e.wireprotosupport()],
2131 [e.name() for e in wirecompengines if e.wireprotosupport()],
2132 name=b'compengine',
2132 name=b'compengine',
2133 fmt=b'%s',
2133 fmt=b'%s',
2134 sep=b', ',
2134 sep=b', ',
2135 ),
2135 ),
2136 )
2136 )
2137 re2 = b'missing'
2137 re2 = b'missing'
2138 if util._re2:
2138 if util._re2:
2139 re2 = b'available'
2139 re2 = b'available'
2140 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2140 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2141 fm.data(re2=bool(util._re2))
2141 fm.data(re2=bool(util._re2))
2142
2142
2143 # templates
2143 # templates
2144 p = templater.templatedir()
2144 p = templater.templatedir()
2145 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2145 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2146 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2146 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2147 if p:
2147 if p:
2148 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2148 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2149 if m:
2149 if m:
2150 # template found, check if it is working
2150 # template found, check if it is working
2151 err = None
2151 err = None
2152 try:
2152 try:
2153 templater.templater.frommapfile(m)
2153 templater.templater.frommapfile(m)
2154 except Exception as inst:
2154 except Exception as inst:
2155 err = stringutil.forcebytestr(inst)
2155 err = stringutil.forcebytestr(inst)
2156 p = None
2156 p = None
2157 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2157 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2158 else:
2158 else:
2159 p = None
2159 p = None
2160 fm.condwrite(
2160 fm.condwrite(
2161 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2161 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2162 )
2162 )
2163 fm.condwrite(
2163 fm.condwrite(
2164 not m,
2164 not m,
2165 b'defaulttemplatenotfound',
2165 b'defaulttemplatenotfound',
2166 _(b" template '%s' not found\n"),
2166 _(b" template '%s' not found\n"),
2167 b"default",
2167 b"default",
2168 )
2168 )
2169 if not p:
2169 if not p:
2170 problems += 1
2170 problems += 1
2171 fm.condwrite(
2171 fm.condwrite(
2172 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2172 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2173 )
2173 )
2174
2174
2175 # editor
2175 # editor
2176 editor = ui.geteditor()
2176 editor = ui.geteditor()
2177 editor = util.expandpath(editor)
2177 editor = util.expandpath(editor)
2178 editorbin = procutil.shellsplit(editor)[0]
2178 editorbin = procutil.shellsplit(editor)[0]
2179 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2179 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2180 cmdpath = procutil.findexe(editorbin)
2180 cmdpath = procutil.findexe(editorbin)
2181 fm.condwrite(
2181 fm.condwrite(
2182 not cmdpath and editor == b'vi',
2182 not cmdpath and editor == b'vi',
2183 b'vinotfound',
2183 b'vinotfound',
2184 _(
2184 _(
2185 b" No commit editor set and can't find %s in PATH\n"
2185 b" No commit editor set and can't find %s in PATH\n"
2186 b" (specify a commit editor in your configuration"
2186 b" (specify a commit editor in your configuration"
2187 b" file)\n"
2187 b" file)\n"
2188 ),
2188 ),
2189 not cmdpath and editor == b'vi' and editorbin,
2189 not cmdpath and editor == b'vi' and editorbin,
2190 )
2190 )
2191 fm.condwrite(
2191 fm.condwrite(
2192 not cmdpath and editor != b'vi',
2192 not cmdpath and editor != b'vi',
2193 b'editornotfound',
2193 b'editornotfound',
2194 _(
2194 _(
2195 b" Can't find editor '%s' in PATH\n"
2195 b" Can't find editor '%s' in PATH\n"
2196 b" (specify a commit editor in your configuration"
2196 b" (specify a commit editor in your configuration"
2197 b" file)\n"
2197 b" file)\n"
2198 ),
2198 ),
2199 not cmdpath and editorbin,
2199 not cmdpath and editorbin,
2200 )
2200 )
2201 if not cmdpath and editor != b'vi':
2201 if not cmdpath and editor != b'vi':
2202 problems += 1
2202 problems += 1
2203
2203
2204 # check username
2204 # check username
2205 username = None
2205 username = None
2206 err = None
2206 err = None
2207 try:
2207 try:
2208 username = ui.username()
2208 username = ui.username()
2209 except error.Abort as e:
2209 except error.Abort as e:
2210 err = e.message
2210 err = e.message
2211 problems += 1
2211 problems += 1
2212
2212
2213 fm.condwrite(
2213 fm.condwrite(
2214 username, b'username', _(b"checking username (%s)\n"), username
2214 username, b'username', _(b"checking username (%s)\n"), username
2215 )
2215 )
2216 fm.condwrite(
2216 fm.condwrite(
2217 err,
2217 err,
2218 b'usernameerror',
2218 b'usernameerror',
2219 _(
2219 _(
2220 b"checking username...\n %s\n"
2220 b"checking username...\n %s\n"
2221 b" (specify a username in your configuration file)\n"
2221 b" (specify a username in your configuration file)\n"
2222 ),
2222 ),
2223 err,
2223 err,
2224 )
2224 )
2225
2225
2226 for name, mod in extensions.extensions():
2226 for name, mod in extensions.extensions():
2227 handler = getattr(mod, 'debuginstall', None)
2227 handler = getattr(mod, 'debuginstall', None)
2228 if handler is not None:
2228 if handler is not None:
2229 problems += handler(ui, fm)
2229 problems += handler(ui, fm)
2230
2230
2231 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2231 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2232 if not problems:
2232 if not problems:
2233 fm.data(problems=problems)
2233 fm.data(problems=problems)
2234 fm.condwrite(
2234 fm.condwrite(
2235 problems,
2235 problems,
2236 b'problems',
2236 b'problems',
2237 _(b"%d problems detected, please check your install!\n"),
2237 _(b"%d problems detected, please check your install!\n"),
2238 problems,
2238 problems,
2239 )
2239 )
2240 fm.end()
2240 fm.end()
2241
2241
2242 return problems
2242 return problems
2243
2243
2244
2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Open the target as a peer so this also works against remote repos.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(hexid) for hexid in ids]
    flags = peer.known(nodes)
    # One '1'/'0' character per queried node, in input order.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2258
2258
2259
2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so old completion scripts invoking this command name keep
    # working; everything is forwarded to the modern implementation.
    debugnamecomplete(ui, repo, *args)
2264
2264
2265
2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) and stop.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire the requested lock(s) non-blockingly
    # and hold them until the user interrupts or confirms release.  The
    # finally clause guarantees release on every exit path.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold until the user answers.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: spin until a signal interrupts us.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report who holds each lock (if anyone).
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report the state of lock file `name` under `vfs`; `method` is the
        # repo method that acquires that lock.  Returns 1 if held, else 0.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take it, so nobody else held it; release immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; omit the host when it is
                    # this machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between the acquire attempt and lstat:
                # treat as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2388
2388
2389
2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def _cache():
        # The fulltext cache hangs off the manifest revlog; not every
        # storage implementation provides one.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            _cache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for node in add:
                try:
                    manifest = manifestlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # Reading the manifest stores its revision in the cache.
                manifest.read()
            return

    # No mutation requested: report the current cache contents.
    cache = _cache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return
    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # cache.peek does not refresh the LRU order
        fulltext = cache.peek(nodeid)
        size = len(fulltext)
        totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2463
2463
2464
2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # In verbose mode, first explain which on-disk merge-state format
        # (v1 vs v2) will actually be used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; users may override with -T.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the two sides of the merge (local/other) with their
    # nodes and, when recorded, the conflict-marker labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file tracked in the merge state.  The
    # fields emitted depend on the record type stored in state[0].
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Regular (un)resolved merge record: local/ancestor/other info.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record (rename/delete style conflicts).
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            # Per-file extras are nested under each file item.
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras" section: extras for files that are NOT part of the
    # merge state itself (those were already emitted per-file above).
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2572
2572
2573
2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every known name except branch names; branches get special
    # handling below because historically only *open* branches were listed.
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # An empty argument list means "complete everything".
    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2596
2596
2597
2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""

    def _changelog():
        # Fetched lazily per mode so nothing happens when no flag is given.
        return repo.unfiltered().changelog

    if opts['dump_new']:
        cl = _changelog()
        index = cl.index
        # Prefer the native index serializer when available, otherwise fall
        # back to the pure-Python one.
        if util.safehasattr(index, "nodemap_data_all"):
            blob = index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(index)
        ui.write(blob)
    elif opts['dump_disk']:
        cl = _changelog()
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, blob = persisted
            ui.write(blob[:])
    elif opts['check']:
        cl = _changelog()
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, blob = persisted
            return nodemap.check_data(ui, cl.index, blob)
    elif opts['metadata']:
        cl = _changelog()
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, blob = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2659
2659
2660
2660
2661 @command(
2661 @command(
2662 b'debugobsolete',
2662 b'debugobsolete',
2663 [
2663 [
2664 (b'', b'flags', 0, _(b'markers flag')),
2664 (b'', b'flags', 0, _(b'markers flag')),
2665 (
2665 (
2666 b'',
2666 b'',
2667 b'record-parents',
2667 b'record-parents',
2668 False,
2668 False,
2669 _(b'record parent information for the precursor'),
2669 _(b'record parent information for the precursor'),
2670 ),
2670 ),
2671 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2671 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2672 (
2672 (
2673 b'',
2673 b'',
2674 b'exclusive',
2674 b'exclusive',
2675 False,
2675 False,
2676 _(b'restrict display to markers only relevant to REV'),
2676 _(b'restrict display to markers only relevant to REV'),
2677 ),
2677 ),
2678 (b'', b'index', False, _(b'display index of the marker')),
2678 (b'', b'index', False, _(b'display index of the marker')),
2679 (b'', b'delete', [], _(b'delete markers specified by indices')),
2679 (b'', b'delete', [], _(b'delete markers specified by indices')),
2680 ]
2680 ]
2681 + cmdutil.commitopts2
2681 + cmdutil.commitopts2
2682 + cmdutil.formatteropts,
2682 + cmdutil.formatteropts,
2683 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2683 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2684 )
2684 )
2685 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2685 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2686 """create arbitrary obsolete marker
2686 """create arbitrary obsolete marker
2687
2687
2688 With no arguments, displays the list of obsolescence markers."""
2688 With no arguments, displays the list of obsolescence markers."""
2689
2689
2690 opts = pycompat.byteskwargs(opts)
2690 opts = pycompat.byteskwargs(opts)
2691
2691
2692 def parsenodeid(s):
2692 def parsenodeid(s):
2693 try:
2693 try:
2694 # We do not use revsingle/revrange functions here to accept
2694 # We do not use revsingle/revrange functions here to accept
2695 # arbitrary node identifiers, possibly not present in the
2695 # arbitrary node identifiers, possibly not present in the
2696 # local repository.
2696 # local repository.
2697 n = bin(s)
2697 n = bin(s)
2698 if len(n) != repo.nodeconstants.nodelen:
2698 if len(n) != repo.nodeconstants.nodelen:
2699 raise ValueError
2699 raise ValueError
2700 return n
2700 return n
2701 except ValueError:
2701 except ValueError:
2702 raise error.InputError(
2702 raise error.InputError(
2703 b'changeset references must be full hexadecimal '
2703 b'changeset references must be full hexadecimal '
2704 b'node identifiers'
2704 b'node identifiers'
2705 )
2705 )
2706
2706
2707 if opts.get(b'delete'):
2707 if opts.get(b'delete'):
2708 indices = []
2708 indices = []
2709 for v in opts.get(b'delete'):
2709 for v in opts.get(b'delete'):
2710 try:
2710 try:
2711 indices.append(int(v))
2711 indices.append(int(v))
2712 except ValueError:
2712 except ValueError:
2713 raise error.InputError(
2713 raise error.InputError(
2714 _(b'invalid index value: %r') % v,
2714 _(b'invalid index value: %r') % v,
2715 hint=_(b'use integers for indices'),
2715 hint=_(b'use integers for indices'),
2716 )
2716 )
2717
2717
2718 if repo.currenttransaction():
2718 if repo.currenttransaction():
2719 raise error.Abort(
2719 raise error.Abort(
2720 _(b'cannot delete obsmarkers in the middle of transaction.')
2720 _(b'cannot delete obsmarkers in the middle of transaction.')
2721 )
2721 )
2722
2722
2723 with repo.lock():
2723 with repo.lock():
2724 n = repair.deleteobsmarkers(repo.obsstore, indices)
2724 n = repair.deleteobsmarkers(repo.obsstore, indices)
2725 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2725 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2726
2726
2727 return
2727 return
2728
2728
2729 if precursor is not None:
2729 if precursor is not None:
2730 if opts[b'rev']:
2730 if opts[b'rev']:
2731 raise error.InputError(
2731 raise error.InputError(
2732 b'cannot select revision when creating marker'
2732 b'cannot select revision when creating marker'
2733 )
2733 )
2734 metadata = {}
2734 metadata = {}
2735 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2735 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2736 succs = tuple(parsenodeid(succ) for succ in successors)
2736 succs = tuple(parsenodeid(succ) for succ in successors)
2737 l = repo.lock()
2737 l = repo.lock()
2738 try:
2738 try:
2739 tr = repo.transaction(b'debugobsolete')
2739 tr = repo.transaction(b'debugobsolete')
2740 try:
2740 try:
2741 date = opts.get(b'date')
2741 date = opts.get(b'date')
2742 if date:
2742 if date:
2743 date = dateutil.parsedate(date)
2743 date = dateutil.parsedate(date)
2744 else:
2744 else:
2745 date = None
2745 date = None
2746 prec = parsenodeid(precursor)
2746 prec = parsenodeid(precursor)
2747 parents = None
2747 parents = None
2748 if opts[b'record_parents']:
2748 if opts[b'record_parents']:
2749 if prec not in repo.unfiltered():
2749 if prec not in repo.unfiltered():
2750 raise error.Abort(
2750 raise error.Abort(
2751 b'cannot used --record-parents on '
2751 b'cannot used --record-parents on '
2752 b'unknown changesets'
2752 b'unknown changesets'
2753 )
2753 )
2754 parents = repo.unfiltered()[prec].parents()
2754 parents = repo.unfiltered()[prec].parents()
2755 parents = tuple(p.node() for p in parents)
2755 parents = tuple(p.node() for p in parents)
2756 repo.obsstore.create(
2756 repo.obsstore.create(
2757 tr,
2757 tr,
2758 prec,
2758 prec,
2759 succs,
2759 succs,
2760 opts[b'flags'],
2760 opts[b'flags'],
2761 parents=parents,
2761 parents=parents,
2762 date=date,
2762 date=date,
2763 metadata=metadata,
2763 metadata=metadata,
2764 ui=ui,
2764 ui=ui,
2765 )
2765 )
2766 tr.close()
2766 tr.close()
2767 except ValueError as exc:
2767 except ValueError as exc:
2768 raise error.Abort(
2768 raise error.Abort(
2769 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2769 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2770 )
2770 )
2771 finally:
2771 finally:
2772 tr.release()
2772 tr.release()
2773 finally:
2773 finally:
2774 l.release()
2774 l.release()
2775 else:
2775 else:
2776 if opts[b'rev']:
2776 if opts[b'rev']:
2777 revs = logcmdutil.revrange(repo, opts[b'rev'])
2777 revs = logcmdutil.revrange(repo, opts[b'rev'])
2778 nodes = [repo[r].node() for r in revs]
2778 nodes = [repo[r].node() for r in revs]
2779 markers = list(
2779 markers = list(
2780 obsutil.getmarkers(
2780 obsutil.getmarkers(
2781 repo, nodes=nodes, exclusive=opts[b'exclusive']
2781 repo, nodes=nodes, exclusive=opts[b'exclusive']
2782 )
2782 )
2783 )
2783 )
2784 markers.sort(key=lambda x: x._data)
2784 markers.sort(key=lambda x: x._data)
2785 else:
2785 else:
2786 markers = obsutil.getmarkers(repo)
2786 markers = obsutil.getmarkers(repo)
2787
2787
2788 markerstoiter = markers
2788 markerstoiter = markers
2789 isrelevant = lambda m: True
2789 isrelevant = lambda m: True
2790 if opts.get(b'rev') and opts.get(b'index'):
2790 if opts.get(b'rev') and opts.get(b'index'):
2791 markerstoiter = obsutil.getmarkers(repo)
2791 markerstoiter = obsutil.getmarkers(repo)
2792 markerset = set(markers)
2792 markerset = set(markers)
2793 isrelevant = lambda m: m in markerset
2793 isrelevant = lambda m: m in markerset
2794
2794
2795 fm = ui.formatter(b'debugobsolete', opts)
2795 fm = ui.formatter(b'debugobsolete', opts)
2796 for i, m in enumerate(markerstoiter):
2796 for i, m in enumerate(markerstoiter):
2797 if not isrelevant(m):
2797 if not isrelevant(m):
2798 # marker can be irrelevant when we're iterating over a set
2798 # marker can be irrelevant when we're iterating over a set
2799 # of markers (markerstoiter) which is bigger than the set
2799 # of markers (markerstoiter) which is bigger than the set
2800 # of markers we want to display (markers)
2800 # of markers we want to display (markers)
2801 # this can happen if both --index and --rev options are
2801 # this can happen if both --index and --rev options are
2802 # provided and thus we need to iterate over all of the markers
2802 # provided and thus we need to iterate over all of the markers
2803 # to get the correct indices, but only display the ones that
2803 # to get the correct indices, but only display the ones that
2804 # are relevant to --rev value
2804 # are relevant to --rev value
2805 continue
2805 continue
2806 fm.startitem()
2806 fm.startitem()
2807 ind = i if opts.get(b'index') else None
2807 ind = i if opts.get(b'index') else None
2808 cmdutil.showmarker(fm, m, index=ind)
2808 cmdutil.showmarker(fm, m, index=ind)
2809 fm.end()
2809 fm.end()
2810
2810
2811
2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Normalize keyword arguments to bytes keys before use.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Print one "source -> destination" line per copy recorded against p1,
    # in the order the context reports them.
    copymap = ctx.p1copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2824
2824
2825
2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Normalize keyword arguments to bytes keys before use.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Print one "source -> destination" line per copy recorded against p2,
    # in the order the context reports them.
    copymap = ctx.p2copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2838
2838
2839
2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate entries under `path` whose state
        # letter is contained in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # The spec points outside this repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec relative to the repo root; dirstate paths use '/'.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop walks the whole dirstate.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    # Convert back to the OS separator for display.
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # Without --full, stop at the next path separator and
                    # offer the directory prefix instead.
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted dirstate-state letters from the filter options.
    # If no filter option was given, fall back to all states below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Directories and files are printed together, sorted, one per line.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2908
2908
2909
2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    base = scmutil.revsingle(repo, rev1)
    target = scmutil.revsingle(repo, rev2)
    # The matcher is built against the first revision's context.
    matcher = scmutil.match(base, pats, opts)
    copymap = copies.pathcopies(base, target, matcher)
    # Emit "source -> destination", sorted by destination path.
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2923
2923
2924
2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; the extra output only
    # becomes visible when --debug is in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            # Translate a boolean into the localized yes/no string.
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        # Always release the peer connection, even if probing failed.
        peer.close()
2948
2948
2949
2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # Mirror how a real merge would honor --tool: force it via ui config.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (verbose) which of the higher-priority sources are set.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's own output unless --debug is in effect;
            # otherwise let the warnings through for diagnosis.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3034
3034
3035
3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return
        # Update mode: attempt the old -> new transition for the key.
        key, old, new = keyinfo
        with peer.commandexecutor() as executor:
            result = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # Shell convention: exit 0 on success, nonzero on failure.
        return not result
    finally:
        peer.close()
3071
3071
3072
3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvecs") of two revisions

    Prints both vectors, their depths, and the delta/hamming/graph
    distances between them, plus a one-character relation:
    ``=`` equal, ``>``/``<`` ancestor relationship, ``|`` unrelated.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # The four comparisons above should be exhaustive, but previously
        # a fall-through left `rel` unbound and crashed the write below
        # with UnboundLocalError. Emit an explicit "unknown" marker instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3099
3099
3100
3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            # Files the manifest knows but the dirstate is missing ...
            missing_from_dirstate = in_manifest - in_dirstate
            # ... plus dirstate-only files that are not freshly added.
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = missing_from_dirstate | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3148
3148
3149
3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize keyword arguments to bytes keys, then delegate the actual
    # rebuild work to the repair module.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3166
3166
3167
3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (old path, old filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if renamed:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3187
3187
3188
3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3194
3194
3195
3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump prints the raw index data; the default shows statistics.
    if not opts.get(b"dump"):
        revlog_debug.debug_revlog(ui, rlog)
    else:
        revlog_debug.dump(ui, rlog)
    return 0
3212
3212
3213
3213
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full node hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure the first node to size the id columns, then stop.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching the chosen format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Parent lookup can fail on damaged data; fall back to null
                # parents so the rest of the index can still be dumped.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3327
3327
3328
3328
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered processing pipeline: each stage's function transforms the tree
    # produced by the previous stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages whose tree is always printed vs. printed only when it differs
    # from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and compare the
        # resulting revision lists.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Emit a unified-diff-style comparison of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3460
3460
3461
3461
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional unbuffered binary stream that the server logs its I/O to.
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the wire protocol over the process's stdin/stdout until EOF.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3510
3510
3511
3511
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 falls back to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3539
3539
3540
3540
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the first positional argument is actually the
    # revision, not a file path; shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap storage wrappers down to the underlying revlog when present.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3567
3567
3568
3568
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit SSLContext instead. Verification is intentionally
    # disabled (CERT_NONE, no hostname check): we only need the peer's raw
    # certificate so win32.checkcertificatechain() can inspect and, if
    # necessary, repair the chain.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE

    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> return the peer certificate in binary (DER) form.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3638
3638
3639
3639
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Candidate bundles: every *.hg file under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show the changesets from `chlist`, honoring --newest-first,
        # --limit and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        # Build a path object for the bundle so it can be handed directly to
        # hg.peer().
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming machinery's own output while probing the
        # bundle for remote changesets.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        # Stop after the first bundle containing the node.
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3780
3779
3781
3780
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested revision and print each recorded subrepository
    # entry, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3793
3792
3794
3793
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Hand the ui and repo objects (repo may be None without a repository)
    # to the interactive session's local namespace.
    code.interact(local={'ui': ui, 'repo': repo})
3810
3809
3811
3810
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() invocations so the obsolescence graph
    # walk is not recomputed for every revision.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One output line per successors set; pruned changesets yield
            # no sets and therefore print nothing below the revision line.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3866
3865
3867
3866
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        # Classify the cached .hgtags filenode for this revision: a valid
        # entry, a valid-looking entry pointing at an unknown filelog node,
        # an absent (None) entry, or anything else (invalid).
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3886
3885
3887
3886
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve the revisions,
        # but the command is optionalrepo so we must check explicitly.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions as extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # An empty key or the reserved name 'ui' is rejected the same
            # way as a missing '=' separator.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, then the alias-expanded tree only when
        # expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3951
3950
3952
3951
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None; substitute a marker so the echoed line
    # is always well-formed bytes.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3967
3966
3968
3967
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed at the plain (non-password) prompt.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
3981
3980
3982
3981
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock: cache regeneration may touch
    # files governed by either lock.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
3988
3987
3989
3988
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the real work lives in the upgrade module; the optimization list
    # is converted to a set so repeated -o values collapse to one.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4039
4038
4040
4039
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Optionally normalize OS path separators to '/' for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size each column to the widest repo-relative / cwd-relative name so
    # the listing lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in matched),
        max(len(repo.pathto(name)) for name in matched),
    )
    for name in matched:
        line = fmt % (
            name,
            display(repo.pathto(name)),
            b'exact' if matcher.exact(name) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4067
4066
4068
4067
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # For divergence, prefix the reason with the divergent nodes and
        # their phases, followed by a separating space.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            descs = [b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent]
            dnodes = b' '.join(descs) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4086
4085
4087
4086
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire-protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options so only the command's own
        # (non-empty) arguments are forwarded.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4118
4117
4119
4118
4120 def _parsewirelangblocks(fh):
4119 def _parsewirelangblocks(fh):
4121 activeaction = None
4120 activeaction = None
4122 blocklines = []
4121 blocklines = []
4123 lastindent = 0
4122 lastindent = 0
4124
4123
4125 for line in fh:
4124 for line in fh:
4126 line = line.rstrip()
4125 line = line.rstrip()
4127 if not line:
4126 if not line:
4128 continue
4127 continue
4129
4128
4130 if line.startswith(b'#'):
4129 if line.startswith(b'#'):
4131 continue
4130 continue
4132
4131
4133 if not line.startswith(b' '):
4132 if not line.startswith(b' '):
4134 # New block. Flush previous one.
4133 # New block. Flush previous one.
4135 if activeaction:
4134 if activeaction:
4136 yield activeaction, blocklines
4135 yield activeaction, blocklines
4137
4136
4138 activeaction = line
4137 activeaction = line
4139 blocklines = []
4138 blocklines = []
4140 lastindent = 0
4139 lastindent = 0
4141 continue
4140 continue
4142
4141
4143 # Else we start with an indent.
4142 # Else we start with an indent.
4144
4143
4145 if not activeaction:
4144 if not activeaction:
4146 raise error.Abort(_(b'indented line outside of block'))
4145 raise error.Abort(_(b'indented line outside of block'))
4147
4146
4148 indent = len(line) - len(line.lstrip())
4147 indent = len(line) - len(line.lstrip())
4149
4148
4150 # If this line is indented more than the last line, concatenate it.
4149 # If this line is indented more than the last line, concatenate it.
4151 if indent > lastindent and blocklines:
4150 if indent > lastindent and blocklines:
4152 blocklines[-1] += line.lstrip()
4151 blocklines[-1] += line.lstrip()
4153 else:
4152 else:
4154 blocklines.append(line)
4153 blocklines.append(line)
4155 lastindent = indent
4154 lastindent = indent
4156
4155
4157 # Flush last block.
4156 # Flush last block.
4158 if activeaction:
4157 if activeaction:
4159 yield activeaction, blocklines
4158 yield activeaction, blocklines
4160
4159
4161
4160
4162 @command(
4161 @command(
4163 b'debugwireproto',
4162 b'debugwireproto',
4164 [
4163 [
4165 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4164 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4166 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4165 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4167 (
4166 (
4168 b'',
4167 b'',
4169 b'noreadstderr',
4168 b'noreadstderr',
4170 False,
4169 False,
4171 _(b'do not read from stderr of the remote'),
4170 _(b'do not read from stderr of the remote'),
4172 ),
4171 ),
4173 (
4172 (
4174 b'',
4173 b'',
4175 b'nologhandshake',
4174 b'nologhandshake',
4176 False,
4175 False,
4177 _(b'do not log I/O related to the peer handshake'),
4176 _(b'do not log I/O related to the peer handshake'),
4178 ),
4177 ),
4179 ]
4178 ]
4180 + cmdutil.remoteopts,
4179 + cmdutil.remoteopts,
4181 _(b'[PATH]'),
4180 _(b'[PATH]'),
4182 optionalrepo=True,
4181 optionalrepo=True,
4183 )
4182 )
4184 def debugwireproto(ui, repo, path=None, **opts):
4183 def debugwireproto(ui, repo, path=None, **opts):
4185 """send wire protocol commands to a server
4184 """send wire protocol commands to a server
4186
4185
4187 This command can be used to issue wire protocol commands to remote
4186 This command can be used to issue wire protocol commands to remote
4188 peers and to debug the raw data being exchanged.
4187 peers and to debug the raw data being exchanged.
4189
4188
4190 ``--localssh`` will start an SSH server against the current repository
4189 ``--localssh`` will start an SSH server against the current repository
4191 and connect to that. By default, the connection will perform a handshake
4190 and connect to that. By default, the connection will perform a handshake
4192 and establish an appropriate peer instance.
4191 and establish an appropriate peer instance.
4193
4192
4194 ``--peer`` can be used to bypass the handshake protocol and construct a
4193 ``--peer`` can be used to bypass the handshake protocol and construct a
4195 peer instance using the specified class type. Valid values are ``raw``,
4194 peer instance using the specified class type. Valid values are ``raw``,
4196 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4195 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4197 don't support higher-level command actions.
4196 don't support higher-level command actions.
4198
4197
4199 ``--noreadstderr`` can be used to disable automatic reading from stderr
4198 ``--noreadstderr`` can be used to disable automatic reading from stderr
4200 of the peer (for SSH connections only). Disabling automatic reading of
4199 of the peer (for SSH connections only). Disabling automatic reading of
4201 stderr is useful for making output more deterministic.
4200 stderr is useful for making output more deterministic.
4202
4201
4203 Commands are issued via a mini language which is specified via stdin.
4202 Commands are issued via a mini language which is specified via stdin.
4204 The language consists of individual actions to perform. An action is
4203 The language consists of individual actions to perform. An action is
4205 defined by a block. A block is defined as a line with no leading
4204 defined by a block. A block is defined as a line with no leading
4206 space followed by 0 or more lines with leading space. Blocks are
4205 space followed by 0 or more lines with leading space. Blocks are
4207 effectively a high-level command with additional metadata.
4206 effectively a high-level command with additional metadata.
4208
4207
4209 Lines beginning with ``#`` are ignored.
4208 Lines beginning with ``#`` are ignored.
4210
4209
4211 The following sections denote available actions.
4210 The following sections denote available actions.
4212
4211
4213 raw
4212 raw
4214 ---
4213 ---
4215
4214
4216 Send raw data to the server.
4215 Send raw data to the server.
4217
4216
4218 The block payload contains the raw data to send as one atomic send
4217 The block payload contains the raw data to send as one atomic send
4219 operation. The data may not actually be delivered in a single system
4218 operation. The data may not actually be delivered in a single system
4220 call: it depends on the abilities of the transport being used.
4219 call: it depends on the abilities of the transport being used.
4221
4220
4222 Each line in the block is de-indented and concatenated. Then, that
4221 Each line in the block is de-indented and concatenated. Then, that
4223 value is evaluated as a Python b'' literal. This allows the use of
4222 value is evaluated as a Python b'' literal. This allows the use of
4224 backslash escaping, etc.
4223 backslash escaping, etc.
4225
4224
4226 raw+
4225 raw+
4227 ----
4226 ----
4228
4227
4229 Behaves like ``raw`` except flushes output afterwards.
4228 Behaves like ``raw`` except flushes output afterwards.
4230
4229
4231 command <X>
4230 command <X>
4232 -----------
4231 -----------
4233
4232
4234 Send a request to run a named command, whose name follows the ``command``
4233 Send a request to run a named command, whose name follows the ``command``
4235 string.
4234 string.
4236
4235
4237 Arguments to the command are defined as lines in this block. The format of
4236 Arguments to the command are defined as lines in this block. The format of
4238 each line is ``<key> <value>``. e.g.::
4237 each line is ``<key> <value>``. e.g.::
4239
4238
4240 command listkeys
4239 command listkeys
4241 namespace bookmarks
4240 namespace bookmarks
4242
4241
4243 If the value begins with ``eval:``, it will be interpreted as a Python
4242 If the value begins with ``eval:``, it will be interpreted as a Python
4244 literal expression. Otherwise values are interpreted as Python b'' literals.
4243 literal expression. Otherwise values are interpreted as Python b'' literals.
4245 This allows sending complex types and encoding special byte sequences via
4244 This allows sending complex types and encoding special byte sequences via
4246 backslash escaping.
4245 backslash escaping.
4247
4246
4248 The following arguments have special meaning:
4247 The following arguments have special meaning:
4249
4248
4250 ``PUSHFILE``
4249 ``PUSHFILE``
4251 When defined, the *push* mechanism of the peer will be used instead
4250 When defined, the *push* mechanism of the peer will be used instead
4252 of the static request-response mechanism and the content of the
4251 of the static request-response mechanism and the content of the
4253 file specified in the value of this argument will be sent as the
4252 file specified in the value of this argument will be sent as the
4254 command payload.
4253 command payload.
4255
4254
4256 This can be used to submit a local bundle file to the remote.
4255 This can be used to submit a local bundle file to the remote.
4257
4256
4258 batchbegin
4257 batchbegin
4259 ----------
4258 ----------
4260
4259
4261 Instruct the peer to begin a batched send.
4260 Instruct the peer to begin a batched send.
4262
4261
4263 All ``command`` blocks are queued for execution until the next
4262 All ``command`` blocks are queued for execution until the next
4264 ``batchsubmit`` block.
4263 ``batchsubmit`` block.
4265
4264
4266 batchsubmit
4265 batchsubmit
4267 -----------
4266 -----------
4268
4267
4269 Submit previously queued ``command`` blocks as a batch request.
4268 Submit previously queued ``command`` blocks as a batch request.
4270
4269
4271 This action MUST be paired with a ``batchbegin`` action.
4270 This action MUST be paired with a ``batchbegin`` action.
4272
4271
4273 httprequest <method> <path>
4272 httprequest <method> <path>
4274 ---------------------------
4273 ---------------------------
4275
4274
4276 (HTTP peer only)
4275 (HTTP peer only)
4277
4276
4278 Send an HTTP request to the peer.
4277 Send an HTTP request to the peer.
4279
4278
4280 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4279 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4281
4280
4282 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4281 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4283 headers to add to the request. e.g. ``Accept: foo``.
4282 headers to add to the request. e.g. ``Accept: foo``.
4284
4283
4285 The following arguments are special:
4284 The following arguments are special:
4286
4285
4287 ``BODYFILE``
4286 ``BODYFILE``
4288 The content of the file defined as the value to this argument will be
4287 The content of the file defined as the value to this argument will be
4289 transferred verbatim as the HTTP request body.
4288 transferred verbatim as the HTTP request body.
4290
4289
4291 ``frame <type> <flags> <payload>``
4290 ``frame <type> <flags> <payload>``
4292 Send a unified protocol frame as part of the request body.
4291 Send a unified protocol frame as part of the request body.
4293
4292
4294 All frames will be collected and sent as the body to the HTTP
4293 All frames will be collected and sent as the body to the HTTP
4295 request.
4294 request.
4296
4295
4297 close
4296 close
4298 -----
4297 -----
4299
4298
4300 Close the connection to the server.
4299 Close the connection to the server.
4301
4300
4302 flush
4301 flush
4303 -----
4302 -----
4304
4303
4305 Flush data written to the server.
4304 Flush data written to the server.
4306
4305
4307 readavailable
4306 readavailable
4308 -------------
4307 -------------
4309
4308
4310 Close the write end of the connection and read all available data from
4309 Close the write end of the connection and read all available data from
4311 the server.
4310 the server.
4312
4311
4313 If the connection to the server encompasses multiple pipes, we poll both
4312 If the connection to the server encompasses multiple pipes, we poll both
4314 pipes and read available data.
4313 pipes and read available data.
4315
4314
4316 readline
4315 readline
4317 --------
4316 --------
4318
4317
4319 Read a line of output from the server. If there are multiple output
4318 Read a line of output from the server. If there are multiple output
4320 pipes, reads only the main pipe.
4319 pipes, reads only the main pipe.
4321
4320
4322 ereadline
4321 ereadline
4323 ---------
4322 ---------
4324
4323
4325 Like ``readline``, but read from the stderr pipe, if available.
4324 Like ``readline``, but read from the stderr pipe, if available.
4326
4325
4327 read <X>
4326 read <X>
4328 --------
4327 --------
4329
4328
4330 ``read()`` N bytes from the server's main output pipe.
4329 ``read()`` N bytes from the server's main output pipe.
4331
4330
4332 eread <X>
4331 eread <X>
4333 ---------
4332 ---------
4334
4333
4335 ``read()`` N bytes from the server's stderr pipe, if available.
4334 ``read()`` N bytes from the server's stderr pipe, if available.
4336
4335
4337 Specifying Unified Frame-Based Protocol Frames
4336 Specifying Unified Frame-Based Protocol Frames
4338 ----------------------------------------------
4337 ----------------------------------------------
4339
4338
4340 It is possible to emit a *Unified Frame-Based Protocol* by using special
4339 It is possible to emit a *Unified Frame-Based Protocol* by using special
4341 syntax.
4340 syntax.
4342
4341
4343 A frame is composed as a type, flags, and payload. These can be parsed
4342 A frame is composed as a type, flags, and payload. These can be parsed
4344 from a string of the form:
4343 from a string of the form:
4345
4344
4346 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4345 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4347
4346
4348 ``request-id`` and ``stream-id`` are integers defining the request and
4347 ``request-id`` and ``stream-id`` are integers defining the request and
4349 stream identifiers.
4348 stream identifiers.
4350
4349
4351 ``type`` can be an integer value for the frame type or the string name
4350 ``type`` can be an integer value for the frame type or the string name
4352 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4351 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4353 ``command-name``.
4352 ``command-name``.
4354
4353
4355 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4354 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4356 components. Each component (and there can be just one) can be an integer
4355 components. Each component (and there can be just one) can be an integer
4357 or a flag name for stream flags or frame flags, respectively. Values are
4356 or a flag name for stream flags or frame flags, respectively. Values are
4358 resolved to integers and then bitwise OR'd together.
4357 resolved to integers and then bitwise OR'd together.
4359
4358
4360 ``payload`` represents the raw frame payload. If it begins with
4359 ``payload`` represents the raw frame payload. If it begins with
4361 ``cbor:``, the following string is evaluated as Python code and the
4360 ``cbor:``, the following string is evaluated as Python code and the
4362 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4361 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4363 as a Python byte string literal.
4362 as a Python byte string literal.
4364 """
4363 """
4365 opts = pycompat.byteskwargs(opts)
4364 opts = pycompat.byteskwargs(opts)
4366
4365
4367 if opts[b'localssh'] and not repo:
4366 if opts[b'localssh'] and not repo:
4368 raise error.Abort(_(b'--localssh requires a repository'))
4367 raise error.Abort(_(b'--localssh requires a repository'))
4369
4368
4370 if opts[b'peer'] and opts[b'peer'] not in (
4369 if opts[b'peer'] and opts[b'peer'] not in (
4371 b'raw',
4370 b'raw',
4372 b'ssh1',
4371 b'ssh1',
4373 ):
4372 ):
4374 raise error.Abort(
4373 raise error.Abort(
4375 _(b'invalid value for --peer'),
4374 _(b'invalid value for --peer'),
4376 hint=_(b'valid values are "raw" and "ssh1"'),
4375 hint=_(b'valid values are "raw" and "ssh1"'),
4377 )
4376 )
4378
4377
4379 if path and opts[b'localssh']:
4378 if path and opts[b'localssh']:
4380 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4379 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4381
4380
4382 if ui.interactive():
4381 if ui.interactive():
4383 ui.write(_(b'(waiting for commands on stdin)\n'))
4382 ui.write(_(b'(waiting for commands on stdin)\n'))
4384
4383
4385 blocks = list(_parsewirelangblocks(ui.fin))
4384 blocks = list(_parsewirelangblocks(ui.fin))
4386
4385
4387 proc = None
4386 proc = None
4388 stdin = None
4387 stdin = None
4389 stdout = None
4388 stdout = None
4390 stderr = None
4389 stderr = None
4391 opener = None
4390 opener = None
4392
4391
4393 if opts[b'localssh']:
4392 if opts[b'localssh']:
4394 # We start the SSH server in its own process so there is process
4393 # We start the SSH server in its own process so there is process
4395 # separation. This prevents a whole class of potential bugs around
4394 # separation. This prevents a whole class of potential bugs around
4396 # shared state from interfering with server operation.
4395 # shared state from interfering with server operation.
4397 args = procutil.hgcmd() + [
4396 args = procutil.hgcmd() + [
4398 b'-R',
4397 b'-R',
4399 repo.root,
4398 repo.root,
4400 b'debugserve',
4399 b'debugserve',
4401 b'--sshstdio',
4400 b'--sshstdio',
4402 ]
4401 ]
4403 proc = subprocess.Popen(
4402 proc = subprocess.Popen(
4404 pycompat.rapply(procutil.tonativestr, args),
4403 pycompat.rapply(procutil.tonativestr, args),
4405 stdin=subprocess.PIPE,
4404 stdin=subprocess.PIPE,
4406 stdout=subprocess.PIPE,
4405 stdout=subprocess.PIPE,
4407 stderr=subprocess.PIPE,
4406 stderr=subprocess.PIPE,
4408 bufsize=0,
4407 bufsize=0,
4409 )
4408 )
4410
4409
4411 stdin = proc.stdin
4410 stdin = proc.stdin
4412 stdout = proc.stdout
4411 stdout = proc.stdout
4413 stderr = proc.stderr
4412 stderr = proc.stderr
4414
4413
4415 # We turn the pipes into observers so we can log I/O.
4414 # We turn the pipes into observers so we can log I/O.
4416 if ui.verbose or opts[b'peer'] == b'raw':
4415 if ui.verbose or opts[b'peer'] == b'raw':
4417 stdin = util.makeloggingfileobject(
4416 stdin = util.makeloggingfileobject(
4418 ui, proc.stdin, b'i', logdata=True
4417 ui, proc.stdin, b'i', logdata=True
4419 )
4418 )
4420 stdout = util.makeloggingfileobject(
4419 stdout = util.makeloggingfileobject(
4421 ui, proc.stdout, b'o', logdata=True
4420 ui, proc.stdout, b'o', logdata=True
4422 )
4421 )
4423 stderr = util.makeloggingfileobject(
4422 stderr = util.makeloggingfileobject(
4424 ui, proc.stderr, b'e', logdata=True
4423 ui, proc.stderr, b'e', logdata=True
4425 )
4424 )
4426
4425
4427 # --localssh also implies the peer connection settings.
4426 # --localssh also implies the peer connection settings.
4428
4427
4429 url = b'ssh://localserver'
4428 url = b'ssh://localserver'
4430 autoreadstderr = not opts[b'noreadstderr']
4429 autoreadstderr = not opts[b'noreadstderr']
4431
4430
4432 if opts[b'peer'] == b'ssh1':
4431 if opts[b'peer'] == b'ssh1':
4433 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4432 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4434 peer = sshpeer.sshv1peer(
4433 peer = sshpeer.sshv1peer(
4435 ui,
4434 ui,
4436 url,
4435 url,
4437 proc,
4436 proc,
4438 stdin,
4437 stdin,
4439 stdout,
4438 stdout,
4440 stderr,
4439 stderr,
4441 None,
4440 None,
4442 autoreadstderr=autoreadstderr,
4441 autoreadstderr=autoreadstderr,
4443 )
4442 )
4444 elif opts[b'peer'] == b'raw':
4443 elif opts[b'peer'] == b'raw':
4445 ui.write(_(b'using raw connection to peer\n'))
4444 ui.write(_(b'using raw connection to peer\n'))
4446 peer = None
4445 peer = None
4447 else:
4446 else:
4448 ui.write(_(b'creating ssh peer from handshake results\n'))
4447 ui.write(_(b'creating ssh peer from handshake results\n'))
4449 peer = sshpeer.makepeer(
4448 peer = sshpeer.makepeer(
4450 ui,
4449 ui,
4451 url,
4450 url,
4452 proc,
4451 proc,
4453 stdin,
4452 stdin,
4454 stdout,
4453 stdout,
4455 stderr,
4454 stderr,
4456 autoreadstderr=autoreadstderr,
4455 autoreadstderr=autoreadstderr,
4457 )
4456 )
4458
4457
4459 elif path:
4458 elif path:
4460 # We bypass hg.peer() so we can proxy the sockets.
4459 # We bypass hg.peer() so we can proxy the sockets.
4461 # TODO consider not doing this because we skip
4460 # TODO consider not doing this because we skip
4462 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4461 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4463 u = urlutil.url(path)
4462 u = urlutil.url(path)
4464 if u.scheme != b'http':
4463 if u.scheme != b'http':
4465 raise error.Abort(_(b'only http:// paths are currently supported'))
4464 raise error.Abort(_(b'only http:// paths are currently supported'))
4466
4465
4467 url, authinfo = u.authinfo()
4466 url, authinfo = u.authinfo()
4468 openerargs = {
4467 openerargs = {
4469 'useragent': b'Mercurial debugwireproto',
4468 'useragent': b'Mercurial debugwireproto',
4470 }
4469 }
4471
4470
4472 # Turn pipes/sockets into observers so we can log I/O.
4471 # Turn pipes/sockets into observers so we can log I/O.
4473 if ui.verbose:
4472 if ui.verbose:
4474 openerargs.update(
4473 openerargs.update(
4475 {
4474 {
4476 'loggingfh': ui,
4475 'loggingfh': ui,
4477 'loggingname': b's',
4476 'loggingname': b's',
4478 'loggingopts': {
4477 'loggingopts': {
4479 'logdata': True,
4478 'logdata': True,
4480 'logdataapis': False,
4479 'logdataapis': False,
4481 },
4480 },
4482 }
4481 }
4483 )
4482 )
4484
4483
4485 if ui.debugflag:
4484 if ui.debugflag:
4486 openerargs['loggingopts']['logdataapis'] = True
4485 openerargs['loggingopts']['logdataapis'] = True
4487
4486
4488 # Don't send default headers when in raw mode. This allows us to
4487 # Don't send default headers when in raw mode. This allows us to
4489 # bypass most of the behavior of our URL handling code so we can
4488 # bypass most of the behavior of our URL handling code so we can
4490 # have near complete control over what's sent on the wire.
4489 # have near complete control over what's sent on the wire.
4491 if opts[b'peer'] == b'raw':
4490 if opts[b'peer'] == b'raw':
4492 openerargs['sendaccept'] = False
4491 openerargs['sendaccept'] = False
4493
4492
4494 opener = urlmod.opener(ui, authinfo, **openerargs)
4493 opener = urlmod.opener(ui, authinfo, **openerargs)
4495
4494
4496 if opts[b'peer'] == b'raw':
4495 if opts[b'peer'] == b'raw':
4497 ui.write(_(b'using raw connection to peer\n'))
4496 ui.write(_(b'using raw connection to peer\n'))
4498 peer = None
4497 peer = None
4499 elif opts[b'peer']:
4498 elif opts[b'peer']:
4500 raise error.Abort(
4499 raise error.Abort(
4501 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4500 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4502 )
4501 )
4503 else:
4502 else:
4504 peer = httppeer.makepeer(ui, path, opener=opener)
4503 peer = httppeer.makepeer(ui, path, opener=opener)
4505
4504
4506 # We /could/ populate stdin/stdout with sock.makefile()...
4505 # We /could/ populate stdin/stdout with sock.makefile()...
4507 else:
4506 else:
4508 raise error.Abort(_(b'unsupported connection configuration'))
4507 raise error.Abort(_(b'unsupported connection configuration'))
4509
4508
4510 batchedcommands = None
4509 batchedcommands = None
4511
4510
4512 # Now perform actions based on the parsed wire language instructions.
4511 # Now perform actions based on the parsed wire language instructions.
4513 for action, lines in blocks:
4512 for action, lines in blocks:
4514 if action in (b'raw', b'raw+'):
4513 if action in (b'raw', b'raw+'):
4515 if not stdin:
4514 if not stdin:
4516 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4515 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4517
4516
4518 # Concatenate the data together.
4517 # Concatenate the data together.
4519 data = b''.join(l.lstrip() for l in lines)
4518 data = b''.join(l.lstrip() for l in lines)
4520 data = stringutil.unescapestr(data)
4519 data = stringutil.unescapestr(data)
4521 stdin.write(data)
4520 stdin.write(data)
4522
4521
4523 if action == b'raw+':
4522 if action == b'raw+':
4524 stdin.flush()
4523 stdin.flush()
4525 elif action == b'flush':
4524 elif action == b'flush':
4526 if not stdin:
4525 if not stdin:
4527 raise error.Abort(_(b'cannot call flush on this peer'))
4526 raise error.Abort(_(b'cannot call flush on this peer'))
4528 stdin.flush()
4527 stdin.flush()
4529 elif action.startswith(b'command'):
4528 elif action.startswith(b'command'):
4530 if not peer:
4529 if not peer:
4531 raise error.Abort(
4530 raise error.Abort(
4532 _(
4531 _(
4533 b'cannot send commands unless peer instance '
4532 b'cannot send commands unless peer instance '
4534 b'is available'
4533 b'is available'
4535 )
4534 )
4536 )
4535 )
4537
4536
4538 command = action.split(b' ', 1)[1]
4537 command = action.split(b' ', 1)[1]
4539
4538
4540 args = {}
4539 args = {}
4541 for line in lines:
4540 for line in lines:
4542 # We need to allow empty values.
4541 # We need to allow empty values.
4543 fields = line.lstrip().split(b' ', 1)
4542 fields = line.lstrip().split(b' ', 1)
4544 if len(fields) == 1:
4543 if len(fields) == 1:
4545 key = fields[0]
4544 key = fields[0]
4546 value = b''
4545 value = b''
4547 else:
4546 else:
4548 key, value = fields
4547 key, value = fields
4549
4548
4550 if value.startswith(b'eval:'):
4549 if value.startswith(b'eval:'):
4551 value = stringutil.evalpythonliteral(value[5:])
4550 value = stringutil.evalpythonliteral(value[5:])
4552 else:
4551 else:
4553 value = stringutil.unescapestr(value)
4552 value = stringutil.unescapestr(value)
4554
4553
4555 args[key] = value
4554 args[key] = value
4556
4555
4557 if batchedcommands is not None:
4556 if batchedcommands is not None:
4558 batchedcommands.append((command, args))
4557 batchedcommands.append((command, args))
4559 continue
4558 continue
4560
4559
4561 ui.status(_(b'sending %s command\n') % command)
4560 ui.status(_(b'sending %s command\n') % command)
4562
4561
4563 if b'PUSHFILE' in args:
4562 if b'PUSHFILE' in args:
4564 with open(args[b'PUSHFILE'], 'rb') as fh:
4563 with open(args[b'PUSHFILE'], 'rb') as fh:
4565 del args[b'PUSHFILE']
4564 del args[b'PUSHFILE']
4566 res, output = peer._callpush(
4565 res, output = peer._callpush(
4567 command, fh, **pycompat.strkwargs(args)
4566 command, fh, **pycompat.strkwargs(args)
4568 )
4567 )
4569 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4568 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4570 ui.status(
4569 ui.status(
4571 _(b'remote output: %s\n') % stringutil.escapestr(output)
4570 _(b'remote output: %s\n') % stringutil.escapestr(output)
4572 )
4571 )
4573 else:
4572 else:
4574 with peer.commandexecutor() as e:
4573 with peer.commandexecutor() as e:
4575 res = e.callcommand(command, args).result()
4574 res = e.callcommand(command, args).result()
4576
4575
4577 ui.status(
4576 ui.status(
4578 _(b'response: %s\n')
4577 _(b'response: %s\n')
4579 % stringutil.pprint(res, bprefix=True, indent=2)
4578 % stringutil.pprint(res, bprefix=True, indent=2)
4580 )
4579 )
4581
4580
4582 elif action == b'batchbegin':
4581 elif action == b'batchbegin':
4583 if batchedcommands is not None:
4582 if batchedcommands is not None:
4584 raise error.Abort(_(b'nested batchbegin not allowed'))
4583 raise error.Abort(_(b'nested batchbegin not allowed'))
4585
4584
4586 batchedcommands = []
4585 batchedcommands = []
4587 elif action == b'batchsubmit':
4586 elif action == b'batchsubmit':
4588 # There is a batching API we could go through. But it would be
4587 # There is a batching API we could go through. But it would be
4589 # difficult to normalize requests into function calls. It is easier
4588 # difficult to normalize requests into function calls. It is easier
4590 # to bypass this layer and normalize to commands + args.
4589 # to bypass this layer and normalize to commands + args.
4591 ui.status(
4590 ui.status(
4592 _(b'sending batch with %d sub-commands\n')
4591 _(b'sending batch with %d sub-commands\n')
4593 % len(batchedcommands)
4592 % len(batchedcommands)
4594 )
4593 )
4595 assert peer is not None
4594 assert peer is not None
4596 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4595 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4597 ui.status(
4596 ui.status(
4598 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4597 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4599 )
4598 )
4600
4599
4601 batchedcommands = None
4600 batchedcommands = None
4602
4601
4603 elif action.startswith(b'httprequest '):
4602 elif action.startswith(b'httprequest '):
4604 if not opener:
4603 if not opener:
4605 raise error.Abort(
4604 raise error.Abort(
4606 _(b'cannot use httprequest without an HTTP peer')
4605 _(b'cannot use httprequest without an HTTP peer')
4607 )
4606 )
4608
4607
4609 request = action.split(b' ', 2)
4608 request = action.split(b' ', 2)
4610 if len(request) != 3:
4609 if len(request) != 3:
4611 raise error.Abort(
4610 raise error.Abort(
4612 _(
4611 _(
4613 b'invalid httprequest: expected format is '
4612 b'invalid httprequest: expected format is '
4614 b'"httprequest <method> <path>'
4613 b'"httprequest <method> <path>'
4615 )
4614 )
4616 )
4615 )
4617
4616
4618 method, httppath = request[1:]
4617 method, httppath = request[1:]
4619 headers = {}
4618 headers = {}
4620 body = None
4619 body = None
4621 frames = []
4620 frames = []
4622 for line in lines:
4621 for line in lines:
4623 line = line.lstrip()
4622 line = line.lstrip()
4624 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4623 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4625 if m:
4624 if m:
4626 # Headers need to use native strings.
4625 # Headers need to use native strings.
4627 key = pycompat.strurl(m.group(1))
4626 key = pycompat.strurl(m.group(1))
4628 value = pycompat.strurl(m.group(2))
4627 value = pycompat.strurl(m.group(2))
4629 headers[key] = value
4628 headers[key] = value
4630 continue
4629 continue
4631
4630
4632 if line.startswith(b'BODYFILE '):
4631 if line.startswith(b'BODYFILE '):
4633 with open(line.split(b' ', 1), b'rb') as fh:
4632 with open(line.split(b' ', 1), b'rb') as fh:
4634 body = fh.read()
4633 body = fh.read()
4635 elif line.startswith(b'frame '):
4634 elif line.startswith(b'frame '):
4636 frame = wireprotoframing.makeframefromhumanstring(
4635 frame = wireprotoframing.makeframefromhumanstring(
4637 line[len(b'frame ') :]
4636 line[len(b'frame ') :]
4638 )
4637 )
4639
4638
4640 frames.append(frame)
4639 frames.append(frame)
4641 else:
4640 else:
4642 raise error.Abort(
4641 raise error.Abort(
4643 _(b'unknown argument to httprequest: %s') % line
4642 _(b'unknown argument to httprequest: %s') % line
4644 )
4643 )
4645
4644
4646 url = path + httppath
4645 url = path + httppath
4647
4646
4648 if frames:
4647 if frames:
4649 body = b''.join(bytes(f) for f in frames)
4648 body = b''.join(bytes(f) for f in frames)
4650
4649
4651 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4650 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4652
4651
4653 # urllib.Request insists on using has_data() as a proxy for
4652 # urllib.Request insists on using has_data() as a proxy for
4654 # determining the request method. Override that to use our
4653 # determining the request method. Override that to use our
4655 # explicitly requested method.
4654 # explicitly requested method.
4656 req.get_method = lambda: pycompat.sysstr(method)
4655 req.get_method = lambda: pycompat.sysstr(method)
4657
4656
4658 try:
4657 try:
4659 res = opener.open(req)
4658 res = opener.open(req)
4660 body = res.read()
4659 body = res.read()
4661 except util.urlerr.urlerror as e:
4660 except util.urlerr.urlerror as e:
4662 # read() method must be called, but only exists in Python 2
4661 # read() method must be called, but only exists in Python 2
4663 getattr(e, 'read', lambda: None)()
4662 getattr(e, 'read', lambda: None)()
4664 continue
4663 continue
4665
4664
4666 ct = res.headers.get('Content-Type')
4665 ct = res.headers.get('Content-Type')
4667 if ct == 'application/mercurial-cbor':
4666 if ct == 'application/mercurial-cbor':
4668 ui.write(
4667 ui.write(
4669 _(b'cbor> %s\n')
4668 _(b'cbor> %s\n')
4670 % stringutil.pprint(
4669 % stringutil.pprint(
4671 cborutil.decodeall(body), bprefix=True, indent=2
4670 cborutil.decodeall(body), bprefix=True, indent=2
4672 )
4671 )
4673 )
4672 )
4674
4673
4675 elif action == b'close':
4674 elif action == b'close':
4676 assert peer is not None
4675 assert peer is not None
4677 peer.close()
4676 peer.close()
4678 elif action == b'readavailable':
4677 elif action == b'readavailable':
4679 if not stdout or not stderr:
4678 if not stdout or not stderr:
4680 raise error.Abort(
4679 raise error.Abort(
4681 _(b'readavailable not available on this peer')
4680 _(b'readavailable not available on this peer')
4682 )
4681 )
4683
4682
4684 stdin.close()
4683 stdin.close()
4685 stdout.read()
4684 stdout.read()
4686 stderr.read()
4685 stderr.read()
4687
4686
4688 elif action == b'readline':
4687 elif action == b'readline':
4689 if not stdout:
4688 if not stdout:
4690 raise error.Abort(_(b'readline not available on this peer'))
4689 raise error.Abort(_(b'readline not available on this peer'))
4691 stdout.readline()
4690 stdout.readline()
4692 elif action == b'ereadline':
4691 elif action == b'ereadline':
4693 if not stderr:
4692 if not stderr:
4694 raise error.Abort(_(b'ereadline not available on this peer'))
4693 raise error.Abort(_(b'ereadline not available on this peer'))
4695 stderr.readline()
4694 stderr.readline()
4696 elif action.startswith(b'read '):
4695 elif action.startswith(b'read '):
4697 count = int(action.split(b' ', 1)[1])
4696 count = int(action.split(b' ', 1)[1])
4698 if not stdout:
4697 if not stdout:
4699 raise error.Abort(_(b'read not available on this peer'))
4698 raise error.Abort(_(b'read not available on this peer'))
4700 stdout.read(count)
4699 stdout.read(count)
4701 elif action.startswith(b'eread '):
4700 elif action.startswith(b'eread '):
4702 count = int(action.split(b' ', 1)[1])
4701 count = int(action.split(b' ', 1)[1])
4703 if not stderr:
4702 if not stderr:
4704 raise error.Abort(_(b'eread not available on this peer'))
4703 raise error.Abort(_(b'eread not available on this peer'))
4705 stderr.read(count)
4704 stderr.read(count)
4706 else:
4705 else:
4707 raise error.Abort(_(b'unknown action: %s') % action)
4706 raise error.Abort(_(b'unknown action: %s') % action)
4708
4707
4709 if batchedcommands is not None:
4708 if batchedcommands is not None:
4710 raise error.Abort(_(b'unclosed "batchbegin" request'))
4709 raise error.Abort(_(b'unclosed "batchbegin" request'))
4711
4710
4712 if peer:
4711 if peer:
4713 peer.close()
4712 peer.close()
4714
4713
4715 if proc:
4714 if proc:
4716 proc.kill()
4715 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now