##// END OF EJS Templates
debugdeltachain: stop summing the same chain over and over...
marmoute -
r51243:af776c3d stable
parent child Browse files
Show More
@@ -1,4776 +1,4783 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 requirements,
73 requirements,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 verify,
90 verify,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 constants as revlog_constants,
106 constants as revlog_constants,
107 debug as revlog_debug,
107 debug as revlog_debug,
108 deltas as deltautil,
108 deltas as deltautil,
109 nodemap,
109 nodemap,
110 rewrite,
110 rewrite,
111 sidedata,
111 sidedata,
112 )
112 )
113
113
114 release = lockmod.release
114 release = lockmod.release
115
115
116 table = {}
116 table = {}
117 table.update(strip.command._table)
117 table.update(strip.command._table)
118 command = registrar.command(table)
118 command = registrar.command(table)
119
119
120
120
121 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
121 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
122 def debugancestor(ui, repo, *args):
122 def debugancestor(ui, repo, *args):
123 """find the ancestor revision of two revisions in a given index"""
123 """find the ancestor revision of two revisions in a given index"""
124 if len(args) == 3:
124 if len(args) == 3:
125 index, rev1, rev2 = args
125 index, rev1, rev2 = args
126 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
126 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
127 lookup = r.lookup
127 lookup = r.lookup
128 elif len(args) == 2:
128 elif len(args) == 2:
129 if not repo:
129 if not repo:
130 raise error.Abort(
130 raise error.Abort(
131 _(b'there is no Mercurial repository here (.hg not found)')
131 _(b'there is no Mercurial repository here (.hg not found)')
132 )
132 )
133 rev1, rev2 = args
133 rev1, rev2 = args
134 r = repo.changelog
134 r = repo.changelog
135 lookup = repo.lookup
135 lookup = repo.lookup
136 else:
136 else:
137 raise error.Abort(_(b'either two or three arguments required'))
137 raise error.Abort(_(b'either two or three arguments required'))
138 a = r.ancestor(lookup(rev1), lookup(rev2))
138 a = r.ancestor(lookup(rev1), lookup(rev2))
139 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
139 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
140
140
141
141
142 @command(b'debugantivirusrunning', [])
142 @command(b'debugantivirusrunning', [])
143 def debugantivirusrunning(ui, repo):
143 def debugantivirusrunning(ui, repo):
144 """attempt to trigger an antivirus scanner to see if one is active"""
144 """attempt to trigger an antivirus scanner to see if one is active"""
145 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
145 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
146 f.write(
146 f.write(
147 util.b85decode(
147 util.b85decode(
148 # This is a base85-armored version of the EICAR test file. See
148 # This is a base85-armored version of the EICAR test file. See
149 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
149 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
150 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
150 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
151 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
151 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
152 )
152 )
153 )
153 )
154 # Give an AV engine time to scan the file.
154 # Give an AV engine time to scan the file.
155 time.sleep(2)
155 time.sleep(2)
156 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157
157
158
158
159 @command(b'debugapplystreamclonebundle', [], b'FILE')
159 @command(b'debugapplystreamclonebundle', [], b'FILE')
160 def debugapplystreamclonebundle(ui, repo, fname):
160 def debugapplystreamclonebundle(ui, repo, fname):
161 """apply a stream clone bundle file"""
161 """apply a stream clone bundle file"""
162 f = hg.openpath(ui, fname)
162 f = hg.openpath(ui, fname)
163 gen = exchange.readbundle(ui, f, fname)
163 gen = exchange.readbundle(ui, f, fname)
164 gen.apply(repo)
164 gen.apply(repo)
165
165
166
166
167 @command(
167 @command(
168 b'debugbuilddag',
168 b'debugbuilddag',
169 [
169 [
170 (
170 (
171 b'm',
171 b'm',
172 b'mergeable-file',
172 b'mergeable-file',
173 None,
173 None,
174 _(b'add single file mergeable changes'),
174 _(b'add single file mergeable changes'),
175 ),
175 ),
176 (
176 (
177 b'o',
177 b'o',
178 b'overwritten-file',
178 b'overwritten-file',
179 None,
179 None,
180 _(b'add single file all revs overwrite'),
180 _(b'add single file all revs overwrite'),
181 ),
181 ),
182 (b'n', b'new-file', None, _(b'add new file at each rev')),
182 (b'n', b'new-file', None, _(b'add new file at each rev')),
183 (
183 (
184 b'',
184 b'',
185 b'from-existing',
185 b'from-existing',
186 None,
186 None,
187 _(b'continue from a non-empty repository'),
187 _(b'continue from a non-empty repository'),
188 ),
188 ),
189 ],
189 ],
190 _(b'[OPTION]... [TEXT]'),
190 _(b'[OPTION]... [TEXT]'),
191 )
191 )
192 def debugbuilddag(
192 def debugbuilddag(
193 ui,
193 ui,
194 repo,
194 repo,
195 text=None,
195 text=None,
196 mergeable_file=False,
196 mergeable_file=False,
197 overwritten_file=False,
197 overwritten_file=False,
198 new_file=False,
198 new_file=False,
199 from_existing=False,
199 from_existing=False,
200 ):
200 ):
201 """builds a repo with a given DAG from scratch in the current empty repo
201 """builds a repo with a given DAG from scratch in the current empty repo
202
202
203 The description of the DAG is read from stdin if not given on the
203 The description of the DAG is read from stdin if not given on the
204 command line.
204 command line.
205
205
206 Elements:
206 Elements:
207
207
208 - "+n" is a linear run of n nodes based on the current default parent
208 - "+n" is a linear run of n nodes based on the current default parent
209 - "." is a single node based on the current default parent
209 - "." is a single node based on the current default parent
210 - "$" resets the default parent to null (implied at the start);
210 - "$" resets the default parent to null (implied at the start);
211 otherwise the default parent is always the last node created
211 otherwise the default parent is always the last node created
212 - "<p" sets the default parent to the backref p
212 - "<p" sets the default parent to the backref p
213 - "*p" is a fork at parent p, which is a backref
213 - "*p" is a fork at parent p, which is a backref
214 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
214 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
215 - "/p2" is a merge of the preceding node and p2
215 - "/p2" is a merge of the preceding node and p2
216 - ":tag" defines a local tag for the preceding node
216 - ":tag" defines a local tag for the preceding node
217 - "@branch" sets the named branch for subsequent nodes
217 - "@branch" sets the named branch for subsequent nodes
218 - "#...\\n" is a comment up to the end of the line
218 - "#...\\n" is a comment up to the end of the line
219
219
220 Whitespace between the above elements is ignored.
220 Whitespace between the above elements is ignored.
221
221
222 A backref is either
222 A backref is either
223
223
224 - a number n, which references the node curr-n, where curr is the current
224 - a number n, which references the node curr-n, where curr is the current
225 node, or
225 node, or
226 - the name of a local tag you placed earlier using ":tag", or
226 - the name of a local tag you placed earlier using ":tag", or
227 - empty to denote the default parent.
227 - empty to denote the default parent.
228
228
229 All string valued-elements are either strictly alphanumeric, or must
229 All string valued-elements are either strictly alphanumeric, or must
230 be enclosed in double quotes ("..."), with "\\" as escape character.
230 be enclosed in double quotes ("..."), with "\\" as escape character.
231 """
231 """
232
232
233 if text is None:
233 if text is None:
234 ui.status(_(b"reading DAG from stdin\n"))
234 ui.status(_(b"reading DAG from stdin\n"))
235 text = ui.fin.read()
235 text = ui.fin.read()
236
236
237 cl = repo.changelog
237 cl = repo.changelog
238 if len(cl) > 0 and not from_existing:
238 if len(cl) > 0 and not from_existing:
239 raise error.Abort(_(b'repository is not empty'))
239 raise error.Abort(_(b'repository is not empty'))
240
240
241 # determine number of revs in DAG
241 # determine number of revs in DAG
242 total = 0
242 total = 0
243 for type, data in dagparser.parsedag(text):
243 for type, data in dagparser.parsedag(text):
244 if type == b'n':
244 if type == b'n':
245 total += 1
245 total += 1
246
246
247 if mergeable_file:
247 if mergeable_file:
248 linesperrev = 2
248 linesperrev = 2
249 # make a file with k lines per rev
249 # make a file with k lines per rev
250 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
250 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
251 initialmergedlines.append(b"")
251 initialmergedlines.append(b"")
252
252
253 tags = []
253 tags = []
254 progress = ui.makeprogress(
254 progress = ui.makeprogress(
255 _(b'building'), unit=_(b'revisions'), total=total
255 _(b'building'), unit=_(b'revisions'), total=total
256 )
256 )
257 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
257 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
258 at = -1
258 at = -1
259 atbranch = b'default'
259 atbranch = b'default'
260 nodeids = []
260 nodeids = []
261 id = 0
261 id = 0
262 progress.update(id)
262 progress.update(id)
263 for type, data in dagparser.parsedag(text):
263 for type, data in dagparser.parsedag(text):
264 if type == b'n':
264 if type == b'n':
265 ui.note((b'node %s\n' % pycompat.bytestr(data)))
265 ui.note((b'node %s\n' % pycompat.bytestr(data)))
266 id, ps = data
266 id, ps = data
267
267
268 files = []
268 files = []
269 filecontent = {}
269 filecontent = {}
270
270
271 p2 = None
271 p2 = None
272 if mergeable_file:
272 if mergeable_file:
273 fn = b"mf"
273 fn = b"mf"
274 p1 = repo[ps[0]]
274 p1 = repo[ps[0]]
275 if len(ps) > 1:
275 if len(ps) > 1:
276 p2 = repo[ps[1]]
276 p2 = repo[ps[1]]
277 pa = p1.ancestor(p2)
277 pa = p1.ancestor(p2)
278 base, local, other = [
278 base, local, other = [
279 x[fn].data() for x in (pa, p1, p2)
279 x[fn].data() for x in (pa, p1, p2)
280 ]
280 ]
281 m3 = simplemerge.Merge3Text(base, local, other)
281 m3 = simplemerge.Merge3Text(base, local, other)
282 ml = [
282 ml = [
283 l.strip()
283 l.strip()
284 for l in simplemerge.render_minimized(m3)[0]
284 for l in simplemerge.render_minimized(m3)[0]
285 ]
285 ]
286 ml.append(b"")
286 ml.append(b"")
287 elif at > 0:
287 elif at > 0:
288 ml = p1[fn].data().split(b"\n")
288 ml = p1[fn].data().split(b"\n")
289 else:
289 else:
290 ml = initialmergedlines
290 ml = initialmergedlines
291 ml[id * linesperrev] += b" r%i" % id
291 ml[id * linesperrev] += b" r%i" % id
292 mergedtext = b"\n".join(ml)
292 mergedtext = b"\n".join(ml)
293 files.append(fn)
293 files.append(fn)
294 filecontent[fn] = mergedtext
294 filecontent[fn] = mergedtext
295
295
296 if overwritten_file:
296 if overwritten_file:
297 fn = b"of"
297 fn = b"of"
298 files.append(fn)
298 files.append(fn)
299 filecontent[fn] = b"r%i\n" % id
299 filecontent[fn] = b"r%i\n" % id
300
300
301 if new_file:
301 if new_file:
302 fn = b"nf%i" % id
302 fn = b"nf%i" % id
303 files.append(fn)
303 files.append(fn)
304 filecontent[fn] = b"r%i\n" % id
304 filecontent[fn] = b"r%i\n" % id
305 if len(ps) > 1:
305 if len(ps) > 1:
306 if not p2:
306 if not p2:
307 p2 = repo[ps[1]]
307 p2 = repo[ps[1]]
308 for fn in p2:
308 for fn in p2:
309 if fn.startswith(b"nf"):
309 if fn.startswith(b"nf"):
310 files.append(fn)
310 files.append(fn)
311 filecontent[fn] = p2[fn].data()
311 filecontent[fn] = p2[fn].data()
312
312
313 def fctxfn(repo, cx, path):
313 def fctxfn(repo, cx, path):
314 if path in filecontent:
314 if path in filecontent:
315 return context.memfilectx(
315 return context.memfilectx(
316 repo, cx, path, filecontent[path]
316 repo, cx, path, filecontent[path]
317 )
317 )
318 return None
318 return None
319
319
320 if len(ps) == 0 or ps[0] < 0:
320 if len(ps) == 0 or ps[0] < 0:
321 pars = [None, None]
321 pars = [None, None]
322 elif len(ps) == 1:
322 elif len(ps) == 1:
323 pars = [nodeids[ps[0]], None]
323 pars = [nodeids[ps[0]], None]
324 else:
324 else:
325 pars = [nodeids[p] for p in ps]
325 pars = [nodeids[p] for p in ps]
326 cx = context.memctx(
326 cx = context.memctx(
327 repo,
327 repo,
328 pars,
328 pars,
329 b"r%i" % id,
329 b"r%i" % id,
330 files,
330 files,
331 fctxfn,
331 fctxfn,
332 date=(id, 0),
332 date=(id, 0),
333 user=b"debugbuilddag",
333 user=b"debugbuilddag",
334 extra={b'branch': atbranch},
334 extra={b'branch': atbranch},
335 )
335 )
336 nodeid = repo.commitctx(cx)
336 nodeid = repo.commitctx(cx)
337 nodeids.append(nodeid)
337 nodeids.append(nodeid)
338 at = id
338 at = id
339 elif type == b'l':
339 elif type == b'l':
340 id, name = data
340 id, name = data
341 ui.note((b'tag %s\n' % name))
341 ui.note((b'tag %s\n' % name))
342 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
342 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
343 elif type == b'a':
343 elif type == b'a':
344 ui.note((b'branch %s\n' % data))
344 ui.note((b'branch %s\n' % data))
345 atbranch = data
345 atbranch = data
346 progress.update(id)
346 progress.update(id)
347
347
348 if tags:
348 if tags:
349 repo.vfs.write(b"localtags", b"".join(tags))
349 repo.vfs.write(b"localtags", b"".join(tags))
350
350
351
351
352 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
352 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
353 indent_string = b' ' * indent
353 indent_string = b' ' * indent
354 if all:
354 if all:
355 ui.writenoi18n(
355 ui.writenoi18n(
356 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
356 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
357 % indent_string
357 % indent_string
358 )
358 )
359
359
360 def showchunks(named):
360 def showchunks(named):
361 ui.write(b"\n%s%s\n" % (indent_string, named))
361 ui.write(b"\n%s%s\n" % (indent_string, named))
362 for deltadata in gen.deltaiter():
362 for deltadata in gen.deltaiter():
363 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
363 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
364 ui.write(
364 ui.write(
365 b"%s%s %s %s %s %s %d\n"
365 b"%s%s %s %s %s %s %d\n"
366 % (
366 % (
367 indent_string,
367 indent_string,
368 hex(node),
368 hex(node),
369 hex(p1),
369 hex(p1),
370 hex(p2),
370 hex(p2),
371 hex(cs),
371 hex(cs),
372 hex(deltabase),
372 hex(deltabase),
373 len(delta),
373 len(delta),
374 )
374 )
375 )
375 )
376
376
377 gen.changelogheader()
377 gen.changelogheader()
378 showchunks(b"changelog")
378 showchunks(b"changelog")
379 gen.manifestheader()
379 gen.manifestheader()
380 showchunks(b"manifest")
380 showchunks(b"manifest")
381 for chunkdata in iter(gen.filelogheader, {}):
381 for chunkdata in iter(gen.filelogheader, {}):
382 fname = chunkdata[b'filename']
382 fname = chunkdata[b'filename']
383 showchunks(fname)
383 showchunks(fname)
384 else:
384 else:
385 if isinstance(gen, bundle2.unbundle20):
385 if isinstance(gen, bundle2.unbundle20):
386 raise error.Abort(_(b'use debugbundle2 for this file'))
386 raise error.Abort(_(b'use debugbundle2 for this file'))
387 gen.changelogheader()
387 gen.changelogheader()
388 for deltadata in gen.deltaiter():
388 for deltadata in gen.deltaiter():
389 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
389 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
390 ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 ui.write(b"%s%s\n" % (indent_string, hex(node)))
391
391
392
392
393 def _debugobsmarkers(ui, part, indent=0, **opts):
393 def _debugobsmarkers(ui, part, indent=0, **opts):
394 """display version and markers contained in 'data'"""
394 """display version and markers contained in 'data'"""
395 opts = pycompat.byteskwargs(opts)
395 opts = pycompat.byteskwargs(opts)
396 data = part.read()
396 data = part.read()
397 indent_string = b' ' * indent
397 indent_string = b' ' * indent
398 try:
398 try:
399 version, markers = obsolete._readmarkers(data)
399 version, markers = obsolete._readmarkers(data)
400 except error.UnknownVersion as exc:
400 except error.UnknownVersion as exc:
401 msg = b"%sunsupported version: %s (%d bytes)\n"
401 msg = b"%sunsupported version: %s (%d bytes)\n"
402 msg %= indent_string, exc.version, len(data)
402 msg %= indent_string, exc.version, len(data)
403 ui.write(msg)
403 ui.write(msg)
404 else:
404 else:
405 msg = b"%sversion: %d (%d bytes)\n"
405 msg = b"%sversion: %d (%d bytes)\n"
406 msg %= indent_string, version, len(data)
406 msg %= indent_string, version, len(data)
407 ui.write(msg)
407 ui.write(msg)
408 fm = ui.formatter(b'debugobsolete', opts)
408 fm = ui.formatter(b'debugobsolete', opts)
409 for rawmarker in sorted(markers):
409 for rawmarker in sorted(markers):
410 m = obsutil.marker(None, rawmarker)
410 m = obsutil.marker(None, rawmarker)
411 fm.startitem()
411 fm.startitem()
412 fm.plain(indent_string)
412 fm.plain(indent_string)
413 cmdutil.showmarker(fm, m)
413 cmdutil.showmarker(fm, m)
414 fm.end()
414 fm.end()
415
415
416
416
417 def _debugphaseheads(ui, data, indent=0):
417 def _debugphaseheads(ui, data, indent=0):
418 """display version and markers contained in 'data'"""
418 """display version and markers contained in 'data'"""
419 indent_string = b' ' * indent
419 indent_string = b' ' * indent
420 headsbyphase = phases.binarydecode(data)
420 headsbyphase = phases.binarydecode(data)
421 for phase in phases.allphases:
421 for phase in phases.allphases:
422 for head in headsbyphase[phase]:
422 for head in headsbyphase[phase]:
423 ui.write(indent_string)
423 ui.write(indent_string)
424 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
424 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425
425
426
426
427 def _quasirepr(thing):
427 def _quasirepr(thing):
428 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
428 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
429 return b'{%s}' % (
429 return b'{%s}' % (
430 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
430 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
431 )
431 )
432 return pycompat.bytestr(repr(thing))
432 return pycompat.bytestr(repr(thing))
433
433
434
434
435 def _debugbundle2(ui, gen, all=None, **opts):
435 def _debugbundle2(ui, gen, all=None, **opts):
436 """lists the contents of a bundle2"""
436 """lists the contents of a bundle2"""
437 if not isinstance(gen, bundle2.unbundle20):
437 if not isinstance(gen, bundle2.unbundle20):
438 raise error.Abort(_(b'not a bundle2 file'))
438 raise error.Abort(_(b'not a bundle2 file'))
439 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
439 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
440 parttypes = opts.get('part_type', [])
440 parttypes = opts.get('part_type', [])
441 for part in gen.iterparts():
441 for part in gen.iterparts():
442 if parttypes and part.type not in parttypes:
442 if parttypes and part.type not in parttypes:
443 continue
443 continue
444 msg = b'%s -- %s (mandatory: %r)\n'
444 msg = b'%s -- %s (mandatory: %r)\n'
445 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
445 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
446 if part.type == b'changegroup':
446 if part.type == b'changegroup':
447 version = part.params.get(b'version', b'01')
447 version = part.params.get(b'version', b'01')
448 cg = changegroup.getunbundler(version, part, b'UN')
448 cg = changegroup.getunbundler(version, part, b'UN')
449 if not ui.quiet:
449 if not ui.quiet:
450 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
450 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
451 if part.type == b'obsmarkers':
451 if part.type == b'obsmarkers':
452 if not ui.quiet:
452 if not ui.quiet:
453 _debugobsmarkers(ui, part, indent=4, **opts)
453 _debugobsmarkers(ui, part, indent=4, **opts)
454 if part.type == b'phase-heads':
454 if part.type == b'phase-heads':
455 if not ui.quiet:
455 if not ui.quiet:
456 _debugphaseheads(ui, part, indent=4)
456 _debugphaseheads(ui, part, indent=4)
457
457
458
458
459 @command(
459 @command(
460 b'debugbundle',
460 b'debugbundle',
461 [
461 [
462 (b'a', b'all', None, _(b'show all details')),
462 (b'a', b'all', None, _(b'show all details')),
463 (b'', b'part-type', [], _(b'show only the named part type')),
463 (b'', b'part-type', [], _(b'show only the named part type')),
464 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
464 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
465 ],
465 ],
466 _(b'FILE'),
466 _(b'FILE'),
467 norepo=True,
467 norepo=True,
468 )
468 )
469 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
469 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
470 """lists the contents of a bundle"""
470 """lists the contents of a bundle"""
471 with hg.openpath(ui, bundlepath) as f:
471 with hg.openpath(ui, bundlepath) as f:
472 if spec:
472 if spec:
473 spec = exchange.getbundlespec(ui, f)
473 spec = exchange.getbundlespec(ui, f)
474 ui.write(b'%s\n' % spec)
474 ui.write(b'%s\n' % spec)
475 return
475 return
476
476
477 gen = exchange.readbundle(ui, f, bundlepath)
477 gen = exchange.readbundle(ui, f, bundlepath)
478 if isinstance(gen, bundle2.unbundle20):
478 if isinstance(gen, bundle2.unbundle20):
479 return _debugbundle2(ui, gen, all=all, **opts)
479 return _debugbundle2(ui, gen, all=all, **opts)
480 _debugchangegroup(ui, gen, all=all, **opts)
480 _debugchangegroup(ui, gen, all=all, **opts)
481
481
482
482
483 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
483 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
484 def debugcapabilities(ui, path, **opts):
484 def debugcapabilities(ui, path, **opts):
485 """lists the capabilities of a remote peer"""
485 """lists the capabilities of a remote peer"""
486 opts = pycompat.byteskwargs(opts)
486 opts = pycompat.byteskwargs(opts)
487 peer = hg.peer(ui, opts, path)
487 peer = hg.peer(ui, opts, path)
488 try:
488 try:
489 caps = peer.capabilities()
489 caps = peer.capabilities()
490 ui.writenoi18n(b'Main capabilities:\n')
490 ui.writenoi18n(b'Main capabilities:\n')
491 for c in sorted(caps):
491 for c in sorted(caps):
492 ui.write(b' %s\n' % c)
492 ui.write(b' %s\n' % c)
493 b2caps = bundle2.bundle2caps(peer)
493 b2caps = bundle2.bundle2caps(peer)
494 if b2caps:
494 if b2caps:
495 ui.writenoi18n(b'Bundle2 capabilities:\n')
495 ui.writenoi18n(b'Bundle2 capabilities:\n')
496 for key, values in sorted(b2caps.items()):
496 for key, values in sorted(b2caps.items()):
497 ui.write(b' %s\n' % key)
497 ui.write(b' %s\n' % key)
498 for v in values:
498 for v in values:
499 ui.write(b' %s\n' % v)
499 ui.write(b' %s\n' % v)
500 finally:
500 finally:
501 peer.close()
501 peer.close()
502
502
503
503
504 @command(
504 @command(
505 b'debugchangedfiles',
505 b'debugchangedfiles',
506 [
506 [
507 (
507 (
508 b'',
508 b'',
509 b'compute',
509 b'compute',
510 False,
510 False,
511 b"compute information instead of reading it from storage",
511 b"compute information instead of reading it from storage",
512 ),
512 ),
513 ],
513 ],
514 b'REV',
514 b'REV',
515 )
515 )
516 def debugchangedfiles(ui, repo, rev, **opts):
516 def debugchangedfiles(ui, repo, rev, **opts):
517 """list the stored files changes for a revision"""
517 """list the stored files changes for a revision"""
518 ctx = logcmdutil.revsingle(repo, rev, None)
518 ctx = logcmdutil.revsingle(repo, rev, None)
519 files = None
519 files = None
520
520
521 if opts['compute']:
521 if opts['compute']:
522 files = metadata.compute_all_files_changes(ctx)
522 files = metadata.compute_all_files_changes(ctx)
523 else:
523 else:
524 sd = repo.changelog.sidedata(ctx.rev())
524 sd = repo.changelog.sidedata(ctx.rev())
525 files_block = sd.get(sidedata.SD_FILES)
525 files_block = sd.get(sidedata.SD_FILES)
526 if files_block is not None:
526 if files_block is not None:
527 files = metadata.decode_files_sidedata(sd)
527 files = metadata.decode_files_sidedata(sd)
528 if files is not None:
528 if files is not None:
529 for f in sorted(files.touched):
529 for f in sorted(files.touched):
530 if f in files.added:
530 if f in files.added:
531 action = b"added"
531 action = b"added"
532 elif f in files.removed:
532 elif f in files.removed:
533 action = b"removed"
533 action = b"removed"
534 elif f in files.merged:
534 elif f in files.merged:
535 action = b"merged"
535 action = b"merged"
536 elif f in files.salvaged:
536 elif f in files.salvaged:
537 action = b"salvaged"
537 action = b"salvaged"
538 else:
538 else:
539 action = b"touched"
539 action = b"touched"
540
540
541 copy_parent = b""
541 copy_parent = b""
542 copy_source = b""
542 copy_source = b""
543 if f in files.copied_from_p1:
543 if f in files.copied_from_p1:
544 copy_parent = b"p1"
544 copy_parent = b"p1"
545 copy_source = files.copied_from_p1[f]
545 copy_source = files.copied_from_p1[f]
546 elif f in files.copied_from_p2:
546 elif f in files.copied_from_p2:
547 copy_parent = b"p2"
547 copy_parent = b"p2"
548 copy_source = files.copied_from_p2[f]
548 copy_source = files.copied_from_p2[f]
549
549
550 data = (action, copy_parent, f, copy_source)
550 data = (action, copy_parent, f, copy_source)
551 template = b"%-8s %2s: %s, %s;\n"
551 template = b"%-8s %2s: %s, %s;\n"
552 ui.write(template % data)
552 ui.write(template % data)
553
553
554
554
555 @command(b'debugcheckstate', [], b'')
555 @command(b'debugcheckstate', [], b'')
556 def debugcheckstate(ui, repo):
556 def debugcheckstate(ui, repo):
557 """validate the correctness of the current dirstate"""
557 """validate the correctness of the current dirstate"""
558 errors = verify.verifier(repo)._verify_dirstate()
558 errors = verify.verifier(repo)._verify_dirstate()
559 if errors:
559 if errors:
560 errstr = _(b"dirstate inconsistent with current parent's manifest")
560 errstr = _(b"dirstate inconsistent with current parent's manifest")
561 raise error.Abort(errstr)
561 raise error.Abort(errstr)
562
562
563
563
564 @command(
564 @command(
565 b'debugcolor',
565 b'debugcolor',
566 [(b'', b'style', None, _(b'show all configured styles'))],
566 [(b'', b'style', None, _(b'show all configured styles'))],
567 b'hg debugcolor',
567 b'hg debugcolor',
568 )
568 )
569 def debugcolor(ui, repo, **opts):
569 def debugcolor(ui, repo, **opts):
570 """show available color, effects or style"""
570 """show available color, effects or style"""
571 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
571 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
572 if opts.get('style'):
572 if opts.get('style'):
573 return _debugdisplaystyle(ui)
573 return _debugdisplaystyle(ui)
574 else:
574 else:
575 return _debugdisplaycolor(ui)
575 return _debugdisplaycolor(ui)
576
576
577
577
578 def _debugdisplaycolor(ui):
578 def _debugdisplaycolor(ui):
579 ui = ui.copy()
579 ui = ui.copy()
580 ui._styles.clear()
580 ui._styles.clear()
581 for effect in color._activeeffects(ui).keys():
581 for effect in color._activeeffects(ui).keys():
582 ui._styles[effect] = effect
582 ui._styles[effect] = effect
583 if ui._terminfoparams:
583 if ui._terminfoparams:
584 for k, v in ui.configitems(b'color'):
584 for k, v in ui.configitems(b'color'):
585 if k.startswith(b'color.'):
585 if k.startswith(b'color.'):
586 ui._styles[k] = k[6:]
586 ui._styles[k] = k[6:]
587 elif k.startswith(b'terminfo.'):
587 elif k.startswith(b'terminfo.'):
588 ui._styles[k] = k[9:]
588 ui._styles[k] = k[9:]
589 ui.write(_(b'available colors:\n'))
589 ui.write(_(b'available colors:\n'))
590 # sort label with a '_' after the other to group '_background' entry.
590 # sort label with a '_' after the other to group '_background' entry.
591 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
591 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
592 for colorname, label in items:
592 for colorname, label in items:
593 ui.write(b'%s\n' % colorname, label=label)
593 ui.write(b'%s\n' % colorname, label=label)
594
594
595
595
def _debugdisplaystyle(ui):
    """List every configured style label and the effects it expands to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        # Nothing configured; emit only the header.
        return
    # Pad every label to the widest one so the effect columns line up.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
609
609
610
610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so phase boundaries are
        # not honored; warn rather than abort (see TODO above).
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
632
632
633
633
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: dump that revlog's DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for every revision, plus an
            # ('l', (rev, "rN")) label event for explicitly listed revs.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file argument: dump the repository changelog's DAG.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Emit an ('a', branchname) annotation whenever the branch
            # changes, then the node event, then any tag label events.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
703
703
704
704
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the single positional
    # argument is actually the revision, not a file.
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
720
720
721
721
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended widens the set of accepted input formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Optional second argument: report whether the parsed timestamp
        # falls inside the given date range.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
740
740
741
741
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # Memoized compressed chain size per revision.  Since we walk revisions
    # in ascending order and a chain's prefix is itself a chain, each chain
    # sum can stop at the first already-cached ancestor instead of re-summing
    # the same chain over and over.
    chain_size_cache = {}

    def revinfo(rev):
        # Gather per-revision statistics straight from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify how this revision's delta was computed (see docstring).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without general delta, the base is either the revision itself
            # (full snapshot) or, implicitly, the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        chain_size = 0
        # Sum compressed sizes from the chain's end backward; stop as soon
        # as a cached prefix total is found.
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk distance from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: slice the chain into on-disk hunks
            # and measure how much would actually be read.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
982
989
983
990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional argument means "REV on the implied revlog"; two mean
    # "FILE REV".
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = revlog.parentrevs(rev)

    # Pick the delta base implied by --source; the base is resolved lazily so
    # only the selected branch touches the revlog.
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = revlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1041
1048
1042
1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is given.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # The time column is padded to the width of the strftime output so
        # all rows align.
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: show 'lnk' instead of an octal mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1130
1137
1131
1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores an ignore-pattern hash (in the docket's tree
    # metadata); for v1 there is nothing to show, so stay silent.
    # Fix: the docstring previously said "nothing for dirstate-v2", which
    # contradicted the code below (the hash is printed exactly when v2).
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1146
1153
1147
1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If false, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (possibly local) peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: restrict the local side the same way
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # the second return value ("anyincoming") is unused here; the
            # name avoids shadowing the `any` builtin
            common, _any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # strict machine-readable formats (e.g. json) must not be polluted
        # by the discovery chatter, so capture it into the formatted data
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all_revs = repo.revs(b'all()')  # renamed from `all` (shadowed a builtin)
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1429
1436
1430
1437
_chunksize = 4 << 10  # copy buffer size: 4 KiB


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # without --output, the payload is streamed straight to the ui
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # copy in fixed-size chunks so arbitrarily large resources never
        # have to fit in memory
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the source handle too (previously leaked); nest the
        # finallys so a failure closing `fh` cannot leave `dest` open
        try:
            fh.close()
        finally:
            if output:
                dest.close()
1456
1463
1457
1464
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    thisversion = util.version()
    fm = ui.formatter(b'debugextensions', opts)

    # iterate in deterministic (name) order
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)

        # where the extension was loaded from, when that can be determined
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source = pycompat.sysexecutable

        if internal:
            # never expose magic string to users
            testedwith = []
        else:
            testedwith = getattr(module, 'testedwith', b'').split()
        bugurl = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            # terse mode: annotate the name with a compatibility marker
            fm.write(b'name', b'%s', name)
            if internal or thisversion in testedwith:
                fm.plain(b'\n')
            elif not testedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % testedwith[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
        fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and testedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(testedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and bugurl,
            b'buglink',
            _(b' bug reporting: %s\n'),
            bugurl or b"",
        )

    fm.end()
1519
1526
1520
1527
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Runs ``expr`` through the fileset pipeline (parse -> analyze ->
    optimize), optionally printing the tree after the stages selected
    with --show-stage and the compiled matcher with --show-matcher,
    then lists the matching file names.  Candidate files come from the
    target revision (or the working directory when no revision is
    given); with --all-files they come from every revision plus the
    working directory.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the tree transformation pipeline, applied in this order; the first
    # entry is the identity so the raw parse tree can also be displayed
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages to print after applying them
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        # validate every requested stage name before doing any work
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare --verbose form historically printed the parsed tree
            # without a stage header; keep that output stable
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        # every file touched by any revision, plus their subrepos
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include everything in the working directory, even unknown and
        # ignored files, so filesets like "unknown()" can be exercised
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # compile the fileset into a matcher and apply it
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1616
1623
1617
1624
1618 @command(
1625 @command(
1619 b"debug-repair-issue6528",
1626 b"debug-repair-issue6528",
1620 [
1627 [
1621 (
1628 (
1622 b'',
1629 b'',
1623 b'to-report',
1630 b'to-report',
1624 b'',
1631 b'',
1625 _(b'build a report of affected revisions to this file'),
1632 _(b'build a report of affected revisions to this file'),
1626 _(b'FILE'),
1633 _(b'FILE'),
1627 ),
1634 ),
1628 (
1635 (
1629 b'',
1636 b'',
1630 b'from-report',
1637 b'from-report',
1631 b'',
1638 b'',
1632 _(b'repair revisions listed in this report file'),
1639 _(b'repair revisions listed in this report file'),
1633 _(b'FILE'),
1640 _(b'FILE'),
1634 ),
1641 ),
1635 (
1642 (
1636 b'',
1643 b'',
1637 b'paranoid',
1644 b'paranoid',
1638 False,
1645 False,
1639 _(b'check that both detection methods do the same thing'),
1646 _(b'check that both detection methods do the same thing'),
1640 ),
1647 ),
1641 ]
1648 ]
1642 + cmdutil.dryrunopts,
1649 + cmdutil.dryrunopts,
1643 )
1650 )
1644 def debug_repair_issue6528(ui, repo, **opts):
1651 def debug_repair_issue6528(ui, repo, **opts):
1645 """find affected revisions and repair them. See issue6528 for more details.
1652 """find affected revisions and repair them. See issue6528 for more details.
1646
1653
1647 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1654 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1648 computation of affected revisions for a given repository across clones.
1655 computation of affected revisions for a given repository across clones.
1649 The report format is line-based (with empty lines ignored):
1656 The report format is line-based (with empty lines ignored):
1650
1657
1651 ```
1658 ```
1652 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1659 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1653 ```
1660 ```
1654
1661
1655 There can be multiple broken revisions per filelog, they are separated by
1662 There can be multiple broken revisions per filelog, they are separated by
1656 a comma with no spaces. The only space is between the revision(s) and the
1663 a comma with no spaces. The only space is between the revision(s) and the
1657 filename.
1664 filename.
1658
1665
1659 Note that this does *not* mean that this repairs future affected revisions,
1666 Note that this does *not* mean that this repairs future affected revisions,
1660 that needs a separate fix at the exchange level that was introduced in
1667 that needs a separate fix at the exchange level that was introduced in
1661 Mercurial 5.9.1.
1668 Mercurial 5.9.1.
1662
1669
1663 There is a `--paranoid` flag to test that the fast implementation is correct
1670 There is a `--paranoid` flag to test that the fast implementation is correct
1664 by checking it against the slow implementation. Since this matter is quite
1671 by checking it against the slow implementation. Since this matter is quite
1665 urgent and testing every edge-case is probably quite costly, we use this
1672 urgent and testing every edge-case is probably quite costly, we use this
1666 method to test on large repositories as a fuzzing method of sorts.
1673 method to test on large repositories as a fuzzing method of sorts.
1667 """
1674 """
1668 cmdutil.check_incompatible_arguments(
1675 cmdutil.check_incompatible_arguments(
1669 opts, 'to_report', ['from_report', 'dry_run']
1676 opts, 'to_report', ['from_report', 'dry_run']
1670 )
1677 )
1671 dry_run = opts.get('dry_run')
1678 dry_run = opts.get('dry_run')
1672 to_report = opts.get('to_report')
1679 to_report = opts.get('to_report')
1673 from_report = opts.get('from_report')
1680 from_report = opts.get('from_report')
1674 paranoid = opts.get('paranoid')
1681 paranoid = opts.get('paranoid')
1675 # TODO maybe add filelog pattern and revision pattern parameters to help
1682 # TODO maybe add filelog pattern and revision pattern parameters to help
1676 # narrow down the search for users that know what they're looking for?
1683 # narrow down the search for users that know what they're looking for?
1677
1684
1678 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1685 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1679 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1686 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1680 raise error.Abort(_(msg))
1687 raise error.Abort(_(msg))
1681
1688
1682 rewrite.repair_issue6528(
1689 rewrite.repair_issue6528(
1683 ui,
1690 ui,
1684 repo,
1691 repo,
1685 dry_run=dry_run,
1692 dry_run=dry_run,
1686 to_report=to_report,
1693 to_report=to_report,
1687 from_report=from_report,
1694 from_report=from_report,
1688 paranoid=paranoid,
1695 paranoid=paranoid,
1689 )
1696 )
1690
1697
1691
1698
1692 @command(b'debugformat', [] + cmdutil.formatteropts)
1699 @command(b'debugformat', [] + cmdutil.formatteropts)
1693 def debugformat(ui, repo, **opts):
1700 def debugformat(ui, repo, **opts):
1694 """display format information about the current repository
1701 """display format information about the current repository
1695
1702
1696 Use --verbose to get extra information about current config value and
1703 Use --verbose to get extra information about current config value and
1697 Mercurial default."""
1704 Mercurial default."""
1698 opts = pycompat.byteskwargs(opts)
1705 opts = pycompat.byteskwargs(opts)
1699 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1706 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1700 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1707 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1701
1708
1702 def makeformatname(name):
1709 def makeformatname(name):
1703 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1710 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1704
1711
1705 fm = ui.formatter(b'debugformat', opts)
1712 fm = ui.formatter(b'debugformat', opts)
1706 if fm.isplain():
1713 if fm.isplain():
1707
1714
1708 def formatvalue(value):
1715 def formatvalue(value):
1709 if util.safehasattr(value, b'startswith'):
1716 if util.safehasattr(value, b'startswith'):
1710 return value
1717 return value
1711 if value:
1718 if value:
1712 return b'yes'
1719 return b'yes'
1713 else:
1720 else:
1714 return b'no'
1721 return b'no'
1715
1722
1716 else:
1723 else:
1717 formatvalue = pycompat.identity
1724 formatvalue = pycompat.identity
1718
1725
1719 fm.plain(b'format-variant')
1726 fm.plain(b'format-variant')
1720 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1727 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1721 fm.plain(b' repo')
1728 fm.plain(b' repo')
1722 if ui.verbose:
1729 if ui.verbose:
1723 fm.plain(b' config default')
1730 fm.plain(b' config default')
1724 fm.plain(b'\n')
1731 fm.plain(b'\n')
1725 for fv in upgrade.allformatvariant:
1732 for fv in upgrade.allformatvariant:
1726 fm.startitem()
1733 fm.startitem()
1727 repovalue = fv.fromrepo(repo)
1734 repovalue = fv.fromrepo(repo)
1728 configvalue = fv.fromconfig(repo)
1735 configvalue = fv.fromconfig(repo)
1729
1736
1730 if repovalue != configvalue:
1737 if repovalue != configvalue:
1731 namelabel = b'formatvariant.name.mismatchconfig'
1738 namelabel = b'formatvariant.name.mismatchconfig'
1732 repolabel = b'formatvariant.repo.mismatchconfig'
1739 repolabel = b'formatvariant.repo.mismatchconfig'
1733 elif repovalue != fv.default:
1740 elif repovalue != fv.default:
1734 namelabel = b'formatvariant.name.mismatchdefault'
1741 namelabel = b'formatvariant.name.mismatchdefault'
1735 repolabel = b'formatvariant.repo.mismatchdefault'
1742 repolabel = b'formatvariant.repo.mismatchdefault'
1736 else:
1743 else:
1737 namelabel = b'formatvariant.name.uptodate'
1744 namelabel = b'formatvariant.name.uptodate'
1738 repolabel = b'formatvariant.repo.uptodate'
1745 repolabel = b'formatvariant.repo.uptodate'
1739
1746
1740 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1747 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1741 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1748 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1742 if fv.default != configvalue:
1749 if fv.default != configvalue:
1743 configlabel = b'formatvariant.config.special'
1750 configlabel = b'formatvariant.config.special'
1744 else:
1751 else:
1745 configlabel = b'formatvariant.config.default'
1752 configlabel = b'formatvariant.config.default'
1746 fm.condwrite(
1753 fm.condwrite(
1747 ui.verbose,
1754 ui.verbose,
1748 b'config',
1755 b'config',
1749 b' %6s',
1756 b' %6s',
1750 formatvalue(configvalue),
1757 formatvalue(configvalue),
1751 label=configlabel,
1758 label=configlabel,
1752 )
1759 )
1753 fm.condwrite(
1760 fm.condwrite(
1754 ui.verbose,
1761 ui.verbose,
1755 b'default',
1762 b'default',
1756 b' %7s',
1763 b' %7s',
1757 formatvalue(fv.default),
1764 formatvalue(fv.default),
1758 label=b'formatvariant.default',
1765 label=b'formatvariant.default',
1759 )
1766 )
1760 fm.plain(b'\n')
1767 fm.plain(b'\n')
1761 fm.end()
1768 fm.end()
1762
1769
1763
1770
1764 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1771 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1765 def debugfsinfo(ui, path=b"."):
1772 def debugfsinfo(ui, path=b"."):
1766 """show information detected about current filesystem"""
1773 """show information detected about current filesystem"""
1767 ui.writenoi18n(b'path: %s\n' % path)
1774 ui.writenoi18n(b'path: %s\n' % path)
1768 ui.writenoi18n(
1775 ui.writenoi18n(
1769 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1776 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1770 )
1777 )
1771 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1778 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1772 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1779 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1773 ui.writenoi18n(
1780 ui.writenoi18n(
1774 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1781 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1775 )
1782 )
1776 ui.writenoi18n(
1783 ui.writenoi18n(
1777 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1784 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1778 )
1785 )
1779 casesensitive = b'(unknown)'
1786 casesensitive = b'(unknown)'
1780 try:
1787 try:
1781 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1788 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1782 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1789 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1783 except OSError:
1790 except OSError:
1784 pass
1791 pass
1785 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1792 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1786
1793
1787
1794
1788 @command(
1795 @command(
1789 b'debuggetbundle',
1796 b'debuggetbundle',
1790 [
1797 [
1791 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1798 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1792 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1799 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1793 (
1800 (
1794 b't',
1801 b't',
1795 b'type',
1802 b'type',
1796 b'bzip2',
1803 b'bzip2',
1797 _(b'bundle compression type to use'),
1804 _(b'bundle compression type to use'),
1798 _(b'TYPE'),
1805 _(b'TYPE'),
1799 ),
1806 ),
1800 ],
1807 ],
1801 _(b'REPO FILE [-H|-C ID]...'),
1808 _(b'REPO FILE [-H|-C ID]...'),
1802 norepo=True,
1809 norepo=True,
1803 )
1810 )
1804 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1811 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1805 """retrieves a bundle from a repo
1812 """retrieves a bundle from a repo
1806
1813
1807 Every ID must be a full-length hex node id string. Saves the bundle to the
1814 Every ID must be a full-length hex node id string. Saves the bundle to the
1808 given file.
1815 given file.
1809 """
1816 """
1810 opts = pycompat.byteskwargs(opts)
1817 opts = pycompat.byteskwargs(opts)
1811 repo = hg.peer(ui, opts, repopath)
1818 repo = hg.peer(ui, opts, repopath)
1812 if not repo.capable(b'getbundle'):
1819 if not repo.capable(b'getbundle'):
1813 raise error.Abort(b"getbundle() not supported by target repository")
1820 raise error.Abort(b"getbundle() not supported by target repository")
1814 args = {}
1821 args = {}
1815 if common:
1822 if common:
1816 args['common'] = [bin(s) for s in common]
1823 args['common'] = [bin(s) for s in common]
1817 if head:
1824 if head:
1818 args['heads'] = [bin(s) for s in head]
1825 args['heads'] = [bin(s) for s in head]
1819 # TODO: get desired bundlecaps from command line.
1826 # TODO: get desired bundlecaps from command line.
1820 args['bundlecaps'] = None
1827 args['bundlecaps'] = None
1821 bundle = repo.getbundle(b'debug', **args)
1828 bundle = repo.getbundle(b'debug', **args)
1822
1829
1823 bundletype = opts.get(b'type', b'bzip2').lower()
1830 bundletype = opts.get(b'type', b'bzip2').lower()
1824 btypes = {
1831 btypes = {
1825 b'none': b'HG10UN',
1832 b'none': b'HG10UN',
1826 b'bzip2': b'HG10BZ',
1833 b'bzip2': b'HG10BZ',
1827 b'gzip': b'HG10GZ',
1834 b'gzip': b'HG10GZ',
1828 b'bundle2': b'HG20',
1835 b'bundle2': b'HG20',
1829 }
1836 }
1830 bundletype = btypes.get(bundletype)
1837 bundletype = btypes.get(bundletype)
1831 if bundletype not in bundle2.bundletypes:
1838 if bundletype not in bundle2.bundletypes:
1832 raise error.Abort(_(b'unknown bundle type specified with --type'))
1839 raise error.Abort(_(b'unknown bundle type specified with --type'))
1833 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1840 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1834
1841
1835
1842
1836 @command(b'debugignore', [], b'[FILE]')
1843 @command(b'debugignore', [], b'[FILE]')
1837 def debugignore(ui, repo, *files, **opts):
1844 def debugignore(ui, repo, *files, **opts):
1838 """display the combined ignore pattern and information about ignored files
1845 """display the combined ignore pattern and information about ignored files
1839
1846
1840 With no argument display the combined ignore pattern.
1847 With no argument display the combined ignore pattern.
1841
1848
1842 Given space separated file names, shows if the given file is ignored and
1849 Given space separated file names, shows if the given file is ignored and
1843 if so, show the ignore rule (file and line number) that matched it.
1850 if so, show the ignore rule (file and line number) that matched it.
1844 """
1851 """
1845 ignore = repo.dirstate._ignore
1852 ignore = repo.dirstate._ignore
1846 if not files:
1853 if not files:
1847 # Show all the patterns
1854 # Show all the patterns
1848 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1855 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1849 else:
1856 else:
1850 m = scmutil.match(repo[None], pats=files)
1857 m = scmutil.match(repo[None], pats=files)
1851 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1858 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1852 for f in m.files():
1859 for f in m.files():
1853 nf = util.normpath(f)
1860 nf = util.normpath(f)
1854 ignored = None
1861 ignored = None
1855 ignoredata = None
1862 ignoredata = None
1856 if nf != b'.':
1863 if nf != b'.':
1857 if ignore(nf):
1864 if ignore(nf):
1858 ignored = nf
1865 ignored = nf
1859 ignoredata = repo.dirstate._ignorefileandline(nf)
1866 ignoredata = repo.dirstate._ignorefileandline(nf)
1860 else:
1867 else:
1861 for p in pathutil.finddirs(nf):
1868 for p in pathutil.finddirs(nf):
1862 if ignore(p):
1869 if ignore(p):
1863 ignored = p
1870 ignored = p
1864 ignoredata = repo.dirstate._ignorefileandline(p)
1871 ignoredata = repo.dirstate._ignorefileandline(p)
1865 break
1872 break
1866 if ignored:
1873 if ignored:
1867 if ignored == nf:
1874 if ignored == nf:
1868 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1875 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1869 else:
1876 else:
1870 ui.write(
1877 ui.write(
1871 _(
1878 _(
1872 b"%s is ignored because of "
1879 b"%s is ignored because of "
1873 b"containing directory %s\n"
1880 b"containing directory %s\n"
1874 )
1881 )
1875 % (uipathfn(f), ignored)
1882 % (uipathfn(f), ignored)
1876 )
1883 )
1877 ignorefile, lineno, line = ignoredata
1884 ignorefile, lineno, line = ignoredata
1878 ui.write(
1885 ui.write(
1879 _(b"(ignore rule in %s, line %d: '%s')\n")
1886 _(b"(ignore rule in %s, line %d: '%s')\n")
1880 % (ignorefile, lineno, line)
1887 % (ignorefile, lineno, line)
1881 )
1888 )
1882 else:
1889 else:
1883 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1890 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1884
1891
1885
1892
1886 @command(
1893 @command(
1887 b'debug-revlog-index|debugindex',
1894 b'debug-revlog-index|debugindex',
1888 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1895 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1889 _(b'-c|-m|FILE'),
1896 _(b'-c|-m|FILE'),
1890 )
1897 )
1891 def debugindex(ui, repo, file_=None, **opts):
1898 def debugindex(ui, repo, file_=None, **opts):
1892 """dump index data for a revlog"""
1899 """dump index data for a revlog"""
1893 opts = pycompat.byteskwargs(opts)
1900 opts = pycompat.byteskwargs(opts)
1894 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1901 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1895
1902
1896 fm = ui.formatter(b'debugindex', opts)
1903 fm = ui.formatter(b'debugindex', opts)
1897
1904
1898 revlog = getattr(store, b'_revlog', store)
1905 revlog = getattr(store, b'_revlog', store)
1899
1906
1900 return revlog_debug.debug_index(
1907 return revlog_debug.debug_index(
1901 ui,
1908 ui,
1902 repo,
1909 repo,
1903 formatter=fm,
1910 formatter=fm,
1904 revlog=revlog,
1911 revlog=revlog,
1905 full_node=ui.debugflag,
1912 full_node=ui.debugflag,
1906 )
1913 )
1907
1914
1908
1915
1909 @command(
1916 @command(
1910 b'debugindexdot',
1917 b'debugindexdot',
1911 cmdutil.debugrevlogopts,
1918 cmdutil.debugrevlogopts,
1912 _(b'-c|-m|FILE'),
1919 _(b'-c|-m|FILE'),
1913 optionalrepo=True,
1920 optionalrepo=True,
1914 )
1921 )
1915 def debugindexdot(ui, repo, file_=None, **opts):
1922 def debugindexdot(ui, repo, file_=None, **opts):
1916 """dump an index DAG as a graphviz dot file"""
1923 """dump an index DAG as a graphviz dot file"""
1917 opts = pycompat.byteskwargs(opts)
1924 opts = pycompat.byteskwargs(opts)
1918 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1925 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1919 ui.writenoi18n(b"digraph G {\n")
1926 ui.writenoi18n(b"digraph G {\n")
1920 for i in r:
1927 for i in r:
1921 node = r.node(i)
1928 node = r.node(i)
1922 pp = r.parents(node)
1929 pp = r.parents(node)
1923 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1930 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1924 if pp[1] != repo.nullid:
1931 if pp[1] != repo.nullid:
1925 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1932 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1926 ui.write(b"}\n")
1933 ui.write(b"}\n")
1927
1934
1928
1935
1929 @command(b'debugindexstats', [])
1936 @command(b'debugindexstats', [])
1930 def debugindexstats(ui, repo):
1937 def debugindexstats(ui, repo):
1931 """show stats related to the changelog index"""
1938 """show stats related to the changelog index"""
1932 repo.changelog.shortest(repo.nullid, 1)
1939 repo.changelog.shortest(repo.nullid, 1)
1933 index = repo.changelog.index
1940 index = repo.changelog.index
1934 if not util.safehasattr(index, b'stats'):
1941 if not util.safehasattr(index, b'stats'):
1935 raise error.Abort(_(b'debugindexstats only works with native code'))
1942 raise error.Abort(_(b'debugindexstats only works with native code'))
1936 for k, v in sorted(index.stats().items()):
1943 for k, v in sorted(index.stats().items()):
1937 ui.write(b'%s: %d\n' % (k, v))
1944 ui.write(b'%s: %d\n' % (k, v))
1938
1945
1939
1946
1940 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1947 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1941 def debuginstall(ui, **opts):
1948 def debuginstall(ui, **opts):
1942 """test Mercurial installation
1949 """test Mercurial installation
1943
1950
1944 Returns 0 on success.
1951 Returns 0 on success.
1945 """
1952 """
1946 opts = pycompat.byteskwargs(opts)
1953 opts = pycompat.byteskwargs(opts)
1947
1954
1948 problems = 0
1955 problems = 0
1949
1956
1950 fm = ui.formatter(b'debuginstall', opts)
1957 fm = ui.formatter(b'debuginstall', opts)
1951 fm.startitem()
1958 fm.startitem()
1952
1959
1953 # encoding might be unknown or wrong. don't translate these messages.
1960 # encoding might be unknown or wrong. don't translate these messages.
1954 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1961 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1955 err = None
1962 err = None
1956 try:
1963 try:
1957 codecs.lookup(pycompat.sysstr(encoding.encoding))
1964 codecs.lookup(pycompat.sysstr(encoding.encoding))
1958 except LookupError as inst:
1965 except LookupError as inst:
1959 err = stringutil.forcebytestr(inst)
1966 err = stringutil.forcebytestr(inst)
1960 problems += 1
1967 problems += 1
1961 fm.condwrite(
1968 fm.condwrite(
1962 err,
1969 err,
1963 b'encodingerror',
1970 b'encodingerror',
1964 b" %s\n (check that your locale is properly set)\n",
1971 b" %s\n (check that your locale is properly set)\n",
1965 err,
1972 err,
1966 )
1973 )
1967
1974
1968 # Python
1975 # Python
1969 pythonlib = None
1976 pythonlib = None
1970 if util.safehasattr(os, '__file__'):
1977 if util.safehasattr(os, '__file__'):
1971 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1978 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1972 elif getattr(sys, 'oxidized', False):
1979 elif getattr(sys, 'oxidized', False):
1973 pythonlib = pycompat.sysexecutable
1980 pythonlib = pycompat.sysexecutable
1974
1981
1975 fm.write(
1982 fm.write(
1976 b'pythonexe',
1983 b'pythonexe',
1977 _(b"checking Python executable (%s)\n"),
1984 _(b"checking Python executable (%s)\n"),
1978 pycompat.sysexecutable or _(b"unknown"),
1985 pycompat.sysexecutable or _(b"unknown"),
1979 )
1986 )
1980 fm.write(
1987 fm.write(
1981 b'pythonimplementation',
1988 b'pythonimplementation',
1982 _(b"checking Python implementation (%s)\n"),
1989 _(b"checking Python implementation (%s)\n"),
1983 pycompat.sysbytes(platform.python_implementation()),
1990 pycompat.sysbytes(platform.python_implementation()),
1984 )
1991 )
1985 fm.write(
1992 fm.write(
1986 b'pythonver',
1993 b'pythonver',
1987 _(b"checking Python version (%s)\n"),
1994 _(b"checking Python version (%s)\n"),
1988 (b"%d.%d.%d" % sys.version_info[:3]),
1995 (b"%d.%d.%d" % sys.version_info[:3]),
1989 )
1996 )
1990 fm.write(
1997 fm.write(
1991 b'pythonlib',
1998 b'pythonlib',
1992 _(b"checking Python lib (%s)...\n"),
1999 _(b"checking Python lib (%s)...\n"),
1993 pythonlib or _(b"unknown"),
2000 pythonlib or _(b"unknown"),
1994 )
2001 )
1995
2002
1996 try:
2003 try:
1997 from . import rustext # pytype: disable=import-error
2004 from . import rustext # pytype: disable=import-error
1998
2005
1999 rustext.__doc__ # trigger lazy import
2006 rustext.__doc__ # trigger lazy import
2000 except ImportError:
2007 except ImportError:
2001 rustext = None
2008 rustext = None
2002
2009
2003 security = set(sslutil.supportedprotocols)
2010 security = set(sslutil.supportedprotocols)
2004 if sslutil.hassni:
2011 if sslutil.hassni:
2005 security.add(b'sni')
2012 security.add(b'sni')
2006
2013
2007 fm.write(
2014 fm.write(
2008 b'pythonsecurity',
2015 b'pythonsecurity',
2009 _(b"checking Python security support (%s)\n"),
2016 _(b"checking Python security support (%s)\n"),
2010 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2017 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2011 )
2018 )
2012
2019
2013 # These are warnings, not errors. So don't increment problem count. This
2020 # These are warnings, not errors. So don't increment problem count. This
2014 # may change in the future.
2021 # may change in the future.
2015 if b'tls1.2' not in security:
2022 if b'tls1.2' not in security:
2016 fm.plain(
2023 fm.plain(
2017 _(
2024 _(
2018 b' TLS 1.2 not supported by Python install; '
2025 b' TLS 1.2 not supported by Python install; '
2019 b'network connections lack modern security\n'
2026 b'network connections lack modern security\n'
2020 )
2027 )
2021 )
2028 )
2022 if b'sni' not in security:
2029 if b'sni' not in security:
2023 fm.plain(
2030 fm.plain(
2024 _(
2031 _(
2025 b' SNI not supported by Python install; may have '
2032 b' SNI not supported by Python install; may have '
2026 b'connectivity issues with some servers\n'
2033 b'connectivity issues with some servers\n'
2027 )
2034 )
2028 )
2035 )
2029
2036
2030 fm.plain(
2037 fm.plain(
2031 _(
2038 _(
2032 b"checking Rust extensions (%s)\n"
2039 b"checking Rust extensions (%s)\n"
2033 % (b'missing' if rustext is None else b'installed')
2040 % (b'missing' if rustext is None else b'installed')
2034 ),
2041 ),
2035 )
2042 )
2036
2043
2037 # TODO print CA cert info
2044 # TODO print CA cert info
2038
2045
2039 # hg version
2046 # hg version
2040 hgver = util.version()
2047 hgver = util.version()
2041 fm.write(
2048 fm.write(
2042 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2049 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2043 )
2050 )
2044 fm.write(
2051 fm.write(
2045 b'hgverextra',
2052 b'hgverextra',
2046 _(b"checking Mercurial custom build (%s)\n"),
2053 _(b"checking Mercurial custom build (%s)\n"),
2047 b'+'.join(hgver.split(b'+')[1:]),
2054 b'+'.join(hgver.split(b'+')[1:]),
2048 )
2055 )
2049
2056
2050 # compiled modules
2057 # compiled modules
2051 hgmodules = None
2058 hgmodules = None
2052 if util.safehasattr(sys.modules[__name__], '__file__'):
2059 if util.safehasattr(sys.modules[__name__], '__file__'):
2053 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2060 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2054 elif getattr(sys, 'oxidized', False):
2061 elif getattr(sys, 'oxidized', False):
2055 hgmodules = pycompat.sysexecutable
2062 hgmodules = pycompat.sysexecutable
2056
2063
2057 fm.write(
2064 fm.write(
2058 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2065 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2059 )
2066 )
2060 fm.write(
2067 fm.write(
2061 b'hgmodules',
2068 b'hgmodules',
2062 _(b"checking installed modules (%s)...\n"),
2069 _(b"checking installed modules (%s)...\n"),
2063 hgmodules or _(b"unknown"),
2070 hgmodules or _(b"unknown"),
2064 )
2071 )
2065
2072
2066 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2073 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2067 rustext = rustandc # for now, that's the only case
2074 rustext = rustandc # for now, that's the only case
2068 cext = policy.policy in (b'c', b'allow') or rustandc
2075 cext = policy.policy in (b'c', b'allow') or rustandc
2069 nopure = cext or rustext
2076 nopure = cext or rustext
2070 if nopure:
2077 if nopure:
2071 err = None
2078 err = None
2072 try:
2079 try:
2073 if cext:
2080 if cext:
2074 from .cext import ( # pytype: disable=import-error
2081 from .cext import ( # pytype: disable=import-error
2075 base85,
2082 base85,
2076 bdiff,
2083 bdiff,
2077 mpatch,
2084 mpatch,
2078 osutil,
2085 osutil,
2079 )
2086 )
2080
2087
2081 # quiet pyflakes
2088 # quiet pyflakes
2082 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2089 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2083 if rustext:
2090 if rustext:
2084 from .rustext import ( # pytype: disable=import-error
2091 from .rustext import ( # pytype: disable=import-error
2085 ancestor,
2092 ancestor,
2086 dirstate,
2093 dirstate,
2087 )
2094 )
2088
2095
2089 dir(ancestor), dir(dirstate) # quiet pyflakes
2096 dir(ancestor), dir(dirstate) # quiet pyflakes
2090 except Exception as inst:
2097 except Exception as inst:
2091 err = stringutil.forcebytestr(inst)
2098 err = stringutil.forcebytestr(inst)
2092 problems += 1
2099 problems += 1
2093 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2100 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2094
2101
2095 compengines = util.compengines._engines.values()
2102 compengines = util.compengines._engines.values()
2096 fm.write(
2103 fm.write(
2097 b'compengines',
2104 b'compengines',
2098 _(b'checking registered compression engines (%s)\n'),
2105 _(b'checking registered compression engines (%s)\n'),
2099 fm.formatlist(
2106 fm.formatlist(
2100 sorted(e.name() for e in compengines),
2107 sorted(e.name() for e in compengines),
2101 name=b'compengine',
2108 name=b'compengine',
2102 fmt=b'%s',
2109 fmt=b'%s',
2103 sep=b', ',
2110 sep=b', ',
2104 ),
2111 ),
2105 )
2112 )
2106 fm.write(
2113 fm.write(
2107 b'compenginesavail',
2114 b'compenginesavail',
2108 _(b'checking available compression engines (%s)\n'),
2115 _(b'checking available compression engines (%s)\n'),
2109 fm.formatlist(
2116 fm.formatlist(
2110 sorted(e.name() for e in compengines if e.available()),
2117 sorted(e.name() for e in compengines if e.available()),
2111 name=b'compengine',
2118 name=b'compengine',
2112 fmt=b'%s',
2119 fmt=b'%s',
2113 sep=b', ',
2120 sep=b', ',
2114 ),
2121 ),
2115 )
2122 )
2116 wirecompengines = compression.compengines.supportedwireengines(
2123 wirecompengines = compression.compengines.supportedwireengines(
2117 compression.SERVERROLE
2124 compression.SERVERROLE
2118 )
2125 )
2119 fm.write(
2126 fm.write(
2120 b'compenginesserver',
2127 b'compenginesserver',
2121 _(
2128 _(
2122 b'checking available compression engines '
2129 b'checking available compression engines '
2123 b'for wire protocol (%s)\n'
2130 b'for wire protocol (%s)\n'
2124 ),
2131 ),
2125 fm.formatlist(
2132 fm.formatlist(
2126 [e.name() for e in wirecompengines if e.wireprotosupport()],
2133 [e.name() for e in wirecompengines if e.wireprotosupport()],
2127 name=b'compengine',
2134 name=b'compengine',
2128 fmt=b'%s',
2135 fmt=b'%s',
2129 sep=b', ',
2136 sep=b', ',
2130 ),
2137 ),
2131 )
2138 )
2132 re2 = b'missing'
2139 re2 = b'missing'
2133 if util._re2:
2140 if util._re2:
2134 re2 = b'available'
2141 re2 = b'available'
2135 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2142 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2136 fm.data(re2=bool(util._re2))
2143 fm.data(re2=bool(util._re2))
2137
2144
2138 # templates
2145 # templates
2139 p = templater.templatedir()
2146 p = templater.templatedir()
2140 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2147 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2141 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2148 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2142 if p:
2149 if p:
2143 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2150 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2144 if m:
2151 if m:
2145 # template found, check if it is working
2152 # template found, check if it is working
2146 err = None
2153 err = None
2147 try:
2154 try:
2148 templater.templater.frommapfile(m)
2155 templater.templater.frommapfile(m)
2149 except Exception as inst:
2156 except Exception as inst:
2150 err = stringutil.forcebytestr(inst)
2157 err = stringutil.forcebytestr(inst)
2151 p = None
2158 p = None
2152 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2159 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2153 else:
2160 else:
2154 p = None
2161 p = None
2155 fm.condwrite(
2162 fm.condwrite(
2156 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2163 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2157 )
2164 )
2158 fm.condwrite(
2165 fm.condwrite(
2159 not m,
2166 not m,
2160 b'defaulttemplatenotfound',
2167 b'defaulttemplatenotfound',
2161 _(b" template '%s' not found\n"),
2168 _(b" template '%s' not found\n"),
2162 b"default",
2169 b"default",
2163 )
2170 )
2164 if not p:
2171 if not p:
2165 problems += 1
2172 problems += 1
2166 fm.condwrite(
2173 fm.condwrite(
2167 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2174 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2168 )
2175 )
2169
2176
2170 # editor
2177 # editor
2171 editor = ui.geteditor()
2178 editor = ui.geteditor()
2172 editor = util.expandpath(editor)
2179 editor = util.expandpath(editor)
2173 editorbin = procutil.shellsplit(editor)[0]
2180 editorbin = procutil.shellsplit(editor)[0]
2174 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2181 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2175 cmdpath = procutil.findexe(editorbin)
2182 cmdpath = procutil.findexe(editorbin)
2176 fm.condwrite(
2183 fm.condwrite(
2177 not cmdpath and editor == b'vi',
2184 not cmdpath and editor == b'vi',
2178 b'vinotfound',
2185 b'vinotfound',
2179 _(
2186 _(
2180 b" No commit editor set and can't find %s in PATH\n"
2187 b" No commit editor set and can't find %s in PATH\n"
2181 b" (specify a commit editor in your configuration"
2188 b" (specify a commit editor in your configuration"
2182 b" file)\n"
2189 b" file)\n"
2183 ),
2190 ),
2184 not cmdpath and editor == b'vi' and editorbin,
2191 not cmdpath and editor == b'vi' and editorbin,
2185 )
2192 )
2186 fm.condwrite(
2193 fm.condwrite(
2187 not cmdpath and editor != b'vi',
2194 not cmdpath and editor != b'vi',
2188 b'editornotfound',
2195 b'editornotfound',
2189 _(
2196 _(
2190 b" Can't find editor '%s' in PATH\n"
2197 b" Can't find editor '%s' in PATH\n"
2191 b" (specify a commit editor in your configuration"
2198 b" (specify a commit editor in your configuration"
2192 b" file)\n"
2199 b" file)\n"
2193 ),
2200 ),
2194 not cmdpath and editorbin,
2201 not cmdpath and editorbin,
2195 )
2202 )
2196 if not cmdpath and editor != b'vi':
2203 if not cmdpath and editor != b'vi':
2197 problems += 1
2204 problems += 1
2198
2205
2199 # check username
2206 # check username
2200 username = None
2207 username = None
2201 err = None
2208 err = None
2202 try:
2209 try:
2203 username = ui.username()
2210 username = ui.username()
2204 except error.Abort as e:
2211 except error.Abort as e:
2205 err = e.message
2212 err = e.message
2206 problems += 1
2213 problems += 1
2207
2214
2208 fm.condwrite(
2215 fm.condwrite(
2209 username, b'username', _(b"checking username (%s)\n"), username
2216 username, b'username', _(b"checking username (%s)\n"), username
2210 )
2217 )
2211 fm.condwrite(
2218 fm.condwrite(
2212 err,
2219 err,
2213 b'usernameerror',
2220 b'usernameerror',
2214 _(
2221 _(
2215 b"checking username...\n %s\n"
2222 b"checking username...\n %s\n"
2216 b" (specify a username in your configuration file)\n"
2223 b" (specify a username in your configuration file)\n"
2217 ),
2224 ),
2218 err,
2225 err,
2219 )
2226 )
2220
2227
2221 for name, mod in extensions.extensions():
2228 for name, mod in extensions.extensions():
2222 handler = getattr(mod, 'debuginstall', None)
2229 handler = getattr(mod, 'debuginstall', None)
2223 if handler is not None:
2230 if handler is not None:
2224 problems += handler(ui, fm)
2231 problems += handler(ui, fm)
2225
2232
2226 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2233 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2227 if not problems:
2234 if not problems:
2228 fm.data(problems=problems)
2235 fm.data(problems=problems)
2229 fm.condwrite(
2236 fm.condwrite(
2230 problems,
2237 problems,
2231 b'problems',
2238 b'problems',
2232 _(b"%d problems detected, please check your install!\n"),
2239 _(b"%d problems detected, please check your install!\n"),
2233 problems,
2240 problems,
2234 )
2241 )
2235 fm.end()
2242 fm.end()
2236
2243
2237 return problems
2244 return problems
2238
2245
2239
2246
2240 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2247 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2241 def debugknown(ui, repopath, *ids, **opts):
2248 def debugknown(ui, repopath, *ids, **opts):
2242 """test whether node ids are known to a repo
2249 """test whether node ids are known to a repo
2243
2250
2244 Every ID must be a full-length hex node id string. Returns a list of 0s
2251 Every ID must be a full-length hex node id string. Returns a list of 0s
2245 and 1s indicating unknown/known.
2252 and 1s indicating unknown/known.
2246 """
2253 """
2247 opts = pycompat.byteskwargs(opts)
2254 opts = pycompat.byteskwargs(opts)
2248 repo = hg.peer(ui, opts, repopath)
2255 repo = hg.peer(ui, opts, repopath)
2249 if not repo.capable(b'known'):
2256 if not repo.capable(b'known'):
2250 raise error.Abort(b"known() not supported by target repository")
2257 raise error.Abort(b"known() not supported by target repository")
2251 flags = repo.known([bin(s) for s in ids])
2258 flags = repo.known([bin(s) for s in ids])
2252 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2259 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2253
2260
2254
2261
2255 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2262 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2256 def debuglabelcomplete(ui, repo, *args):
2263 def debuglabelcomplete(ui, repo, *args):
2257 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2264 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2258 debugnamecomplete(ui, repo, *args)
2265 debugnamecomplete(ui, repo, *args)
2259
2266
2260
2267
2261 @command(
2268 @command(
2262 b'debuglocks',
2269 b'debuglocks',
2263 [
2270 [
2264 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2271 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2265 (
2272 (
2266 b'W',
2273 b'W',
2267 b'force-free-wlock',
2274 b'force-free-wlock',
2268 None,
2275 None,
2269 _(b'free the working state lock (DANGEROUS)'),
2276 _(b'free the working state lock (DANGEROUS)'),
2270 ),
2277 ),
2271 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2278 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2272 (
2279 (
2273 b'S',
2280 b'S',
2274 b'set-wlock',
2281 b'set-wlock',
2275 None,
2282 None,
2276 _(b'set the working state lock until stopped'),
2283 _(b'set the working state lock until stopped'),
2277 ),
2284 ),
2278 ],
2285 ],
2279 _(b'[OPTION]...'),
2286 _(b'[OPTION]...'),
2280 )
2287 )
2281 def debuglocks(ui, repo, **opts):
2288 def debuglocks(ui, repo, **opts):
2282 """show or modify state of locks
2289 """show or modify state of locks
2283
2290
2284 By default, this command will show which locks are held. This
2291 By default, this command will show which locks are held. This
2285 includes the user and process holding the lock, the amount of time
2292 includes the user and process holding the lock, the amount of time
2286 the lock has been held, and the machine name where the process is
2293 the lock has been held, and the machine name where the process is
2287 running if it's not local.
2294 running if it's not local.
2288
2295
2289 Locks protect the integrity of Mercurial's data, so should be
2296 Locks protect the integrity of Mercurial's data, so should be
2290 treated with care. System crashes or other interruptions may cause
2297 treated with care. System crashes or other interruptions may cause
2291 locks to not be properly released, though Mercurial will usually
2298 locks to not be properly released, though Mercurial will usually
2292 detect and remove such stale locks automatically.
2299 detect and remove such stale locks automatically.
2293
2300
2294 However, detecting stale locks may not always be possible (for
2301 However, detecting stale locks may not always be possible (for
2295 instance, on a shared filesystem). Removing locks may also be
2302 instance, on a shared filesystem). Removing locks may also be
2296 blocked by filesystem permissions.
2303 blocked by filesystem permissions.
2297
2304
2298 Setting a lock will prevent other commands from changing the data.
2305 Setting a lock will prevent other commands from changing the data.
2299 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2306 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2300 The set locks are removed when the command exits.
2307 The set locks are removed when the command exits.
2301
2308
2302 Returns 0 if no locks are held.
2309 Returns 0 if no locks are held.
2303
2310
2304 """
2311 """
2305
2312
2306 if opts.get('force_free_lock'):
2313 if opts.get('force_free_lock'):
2307 repo.svfs.tryunlink(b'lock')
2314 repo.svfs.tryunlink(b'lock')
2308 if opts.get('force_free_wlock'):
2315 if opts.get('force_free_wlock'):
2309 repo.vfs.tryunlink(b'wlock')
2316 repo.vfs.tryunlink(b'wlock')
2310 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2317 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2311 return 0
2318 return 0
2312
2319
2313 locks = []
2320 locks = []
2314 try:
2321 try:
2315 if opts.get('set_wlock'):
2322 if opts.get('set_wlock'):
2316 try:
2323 try:
2317 locks.append(repo.wlock(False))
2324 locks.append(repo.wlock(False))
2318 except error.LockHeld:
2325 except error.LockHeld:
2319 raise error.Abort(_(b'wlock is already held'))
2326 raise error.Abort(_(b'wlock is already held'))
2320 if opts.get('set_lock'):
2327 if opts.get('set_lock'):
2321 try:
2328 try:
2322 locks.append(repo.lock(False))
2329 locks.append(repo.lock(False))
2323 except error.LockHeld:
2330 except error.LockHeld:
2324 raise error.Abort(_(b'lock is already held'))
2331 raise error.Abort(_(b'lock is already held'))
2325 if len(locks):
2332 if len(locks):
2326 try:
2333 try:
2327 if ui.interactive():
2334 if ui.interactive():
2328 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2335 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2329 ui.promptchoice(prompt)
2336 ui.promptchoice(prompt)
2330 else:
2337 else:
2331 msg = b"%d locks held, waiting for signal\n"
2338 msg = b"%d locks held, waiting for signal\n"
2332 msg %= len(locks)
2339 msg %= len(locks)
2333 ui.status(msg)
2340 ui.status(msg)
2334 while True: # XXX wait for a signal
2341 while True: # XXX wait for a signal
2335 time.sleep(0.1)
2342 time.sleep(0.1)
2336 except KeyboardInterrupt:
2343 except KeyboardInterrupt:
2337 msg = b"signal-received releasing locks\n"
2344 msg = b"signal-received releasing locks\n"
2338 ui.status(msg)
2345 ui.status(msg)
2339 return 0
2346 return 0
2340 finally:
2347 finally:
2341 release(*locks)
2348 release(*locks)
2342
2349
2343 now = time.time()
2350 now = time.time()
2344 held = 0
2351 held = 0
2345
2352
2346 def report(vfs, name, method):
2353 def report(vfs, name, method):
2347 # this causes stale locks to get reaped for more accurate reporting
2354 # this causes stale locks to get reaped for more accurate reporting
2348 try:
2355 try:
2349 l = method(False)
2356 l = method(False)
2350 except error.LockHeld:
2357 except error.LockHeld:
2351 l = None
2358 l = None
2352
2359
2353 if l:
2360 if l:
2354 l.release()
2361 l.release()
2355 else:
2362 else:
2356 try:
2363 try:
2357 st = vfs.lstat(name)
2364 st = vfs.lstat(name)
2358 age = now - st[stat.ST_MTIME]
2365 age = now - st[stat.ST_MTIME]
2359 user = util.username(st.st_uid)
2366 user = util.username(st.st_uid)
2360 locker = vfs.readlock(name)
2367 locker = vfs.readlock(name)
2361 if b":" in locker:
2368 if b":" in locker:
2362 host, pid = locker.split(b':')
2369 host, pid = locker.split(b':')
2363 if host == socket.gethostname():
2370 if host == socket.gethostname():
2364 locker = b'user %s, process %s' % (user or b'None', pid)
2371 locker = b'user %s, process %s' % (user or b'None', pid)
2365 else:
2372 else:
2366 locker = b'user %s, process %s, host %s' % (
2373 locker = b'user %s, process %s, host %s' % (
2367 user or b'None',
2374 user or b'None',
2368 pid,
2375 pid,
2369 host,
2376 host,
2370 )
2377 )
2371 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2378 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2372 return 1
2379 return 1
2373 except FileNotFoundError:
2380 except FileNotFoundError:
2374 pass
2381 pass
2375
2382
2376 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2383 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2377 return 0
2384 return 0
2378
2385
2379 held += report(repo.svfs, b"lock", repo.lock)
2386 held += report(repo.svfs, b"lock", repo.lock)
2380 held += report(repo.vfs, b"wlock", repo.wlock)
2387 held += report(repo.vfs, b"wlock", repo.wlock)
2381
2388
2382 return held
2389 return held
2383
2390
2384
2391
2385 @command(
2392 @command(
2386 b'debugmanifestfulltextcache',
2393 b'debugmanifestfulltextcache',
2387 [
2394 [
2388 (b'', b'clear', False, _(b'clear the cache')),
2395 (b'', b'clear', False, _(b'clear the cache')),
2389 (
2396 (
2390 b'a',
2397 b'a',
2391 b'add',
2398 b'add',
2392 [],
2399 [],
2393 _(b'add the given manifest nodes to the cache'),
2400 _(b'add the given manifest nodes to the cache'),
2394 _(b'NODE'),
2401 _(b'NODE'),
2395 ),
2402 ),
2396 ],
2403 ],
2397 b'',
2404 b'',
2398 )
2405 )
2399 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2406 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2400 """show, clear or amend the contents of the manifest fulltext cache"""
2407 """show, clear or amend the contents of the manifest fulltext cache"""
2401
2408
2402 def getcache():
2409 def getcache():
2403 r = repo.manifestlog.getstorage(b'')
2410 r = repo.manifestlog.getstorage(b'')
2404 try:
2411 try:
2405 return r._fulltextcache
2412 return r._fulltextcache
2406 except AttributeError:
2413 except AttributeError:
2407 msg = _(
2414 msg = _(
2408 b"Current revlog implementation doesn't appear to have a "
2415 b"Current revlog implementation doesn't appear to have a "
2409 b"manifest fulltext cache\n"
2416 b"manifest fulltext cache\n"
2410 )
2417 )
2411 raise error.Abort(msg)
2418 raise error.Abort(msg)
2412
2419
2413 if opts.get('clear'):
2420 if opts.get('clear'):
2414 with repo.wlock():
2421 with repo.wlock():
2415 cache = getcache()
2422 cache = getcache()
2416 cache.clear(clear_persisted_data=True)
2423 cache.clear(clear_persisted_data=True)
2417 return
2424 return
2418
2425
2419 if add:
2426 if add:
2420 with repo.wlock():
2427 with repo.wlock():
2421 m = repo.manifestlog
2428 m = repo.manifestlog
2422 store = m.getstorage(b'')
2429 store = m.getstorage(b'')
2423 for n in add:
2430 for n in add:
2424 try:
2431 try:
2425 manifest = m[store.lookup(n)]
2432 manifest = m[store.lookup(n)]
2426 except error.LookupError as e:
2433 except error.LookupError as e:
2427 raise error.Abort(
2434 raise error.Abort(
2428 bytes(e), hint=b"Check your manifest node id"
2435 bytes(e), hint=b"Check your manifest node id"
2429 )
2436 )
2430 manifest.read() # stores revisision in cache too
2437 manifest.read() # stores revisision in cache too
2431 return
2438 return
2432
2439
2433 cache = getcache()
2440 cache = getcache()
2434 if not len(cache):
2441 if not len(cache):
2435 ui.write(_(b'cache empty\n'))
2442 ui.write(_(b'cache empty\n'))
2436 else:
2443 else:
2437 ui.write(
2444 ui.write(
2438 _(
2445 _(
2439 b'cache contains %d manifest entries, in order of most to '
2446 b'cache contains %d manifest entries, in order of most to '
2440 b'least recent:\n'
2447 b'least recent:\n'
2441 )
2448 )
2442 % (len(cache),)
2449 % (len(cache),)
2443 )
2450 )
2444 totalsize = 0
2451 totalsize = 0
2445 for nodeid in cache:
2452 for nodeid in cache:
2446 # Use cache.get to not update the LRU order
2453 # Use cache.get to not update the LRU order
2447 data = cache.peek(nodeid)
2454 data = cache.peek(nodeid)
2448 size = len(data)
2455 size = len(data)
2449 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2456 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2450 ui.write(
2457 ui.write(
2451 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2458 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2452 )
2459 )
2453 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2460 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2454 ui.write(
2461 ui.write(
2455 _(b'total cache data size %s, on-disk %s\n')
2462 _(b'total cache data size %s, on-disk %s\n')
2456 % (util.bytecount(totalsize), util.bytecount(ondisk))
2463 % (util.bytecount(totalsize), util.bytecount(ondisk))
2457 )
2464 )
2458
2465
2459
2466
2460 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2467 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2461 def debugmergestate(ui, repo, *args, **opts):
2468 def debugmergestate(ui, repo, *args, **opts):
2462 """print merge state
2469 """print merge state
2463
2470
2464 Use --verbose to print out information about whether v1 or v2 merge state
2471 Use --verbose to print out information about whether v1 or v2 merge state
2465 was chosen."""
2472 was chosen."""
2466
2473
2467 if ui.verbose:
2474 if ui.verbose:
2468 ms = mergestatemod.mergestate(repo)
2475 ms = mergestatemod.mergestate(repo)
2469
2476
2470 # sort so that reasonable information is on top
2477 # sort so that reasonable information is on top
2471 v1records = ms._readrecordsv1()
2478 v1records = ms._readrecordsv1()
2472 v2records = ms._readrecordsv2()
2479 v2records = ms._readrecordsv2()
2473
2480
2474 if not v1records and not v2records:
2481 if not v1records and not v2records:
2475 pass
2482 pass
2476 elif not v2records:
2483 elif not v2records:
2477 ui.writenoi18n(b'no version 2 merge state\n')
2484 ui.writenoi18n(b'no version 2 merge state\n')
2478 elif ms._v1v2match(v1records, v2records):
2485 elif ms._v1v2match(v1records, v2records):
2479 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2486 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2480 else:
2487 else:
2481 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2488 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2482
2489
2483 opts = pycompat.byteskwargs(opts)
2490 opts = pycompat.byteskwargs(opts)
2484 if not opts[b'template']:
2491 if not opts[b'template']:
2485 opts[b'template'] = (
2492 opts[b'template'] = (
2486 b'{if(commits, "", "no merge state found\n")}'
2493 b'{if(commits, "", "no merge state found\n")}'
2487 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2494 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2488 b'{files % "file: {path} (state \\"{state}\\")\n'
2495 b'{files % "file: {path} (state \\"{state}\\")\n'
2489 b'{if(local_path, "'
2496 b'{if(local_path, "'
2490 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2497 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2491 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2498 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2492 b' other path: {other_path} (node {other_node})\n'
2499 b' other path: {other_path} (node {other_node})\n'
2493 b'")}'
2500 b'")}'
2494 b'{if(rename_side, "'
2501 b'{if(rename_side, "'
2495 b' rename side: {rename_side}\n'
2502 b' rename side: {rename_side}\n'
2496 b' renamed path: {renamed_path}\n'
2503 b' renamed path: {renamed_path}\n'
2497 b'")}'
2504 b'")}'
2498 b'{extras % " extra: {key} = {value}\n"}'
2505 b'{extras % " extra: {key} = {value}\n"}'
2499 b'"}'
2506 b'"}'
2500 b'{extras % "extra: {file} ({key} = {value})\n"}'
2507 b'{extras % "extra: {file} ({key} = {value})\n"}'
2501 )
2508 )
2502
2509
2503 ms = mergestatemod.mergestate.read(repo)
2510 ms = mergestatemod.mergestate.read(repo)
2504
2511
2505 fm = ui.formatter(b'debugmergestate', opts)
2512 fm = ui.formatter(b'debugmergestate', opts)
2506 fm.startitem()
2513 fm.startitem()
2507
2514
2508 fm_commits = fm.nested(b'commits')
2515 fm_commits = fm.nested(b'commits')
2509 if ms.active():
2516 if ms.active():
2510 for name, node, label_index in (
2517 for name, node, label_index in (
2511 (b'local', ms.local, 0),
2518 (b'local', ms.local, 0),
2512 (b'other', ms.other, 1),
2519 (b'other', ms.other, 1),
2513 ):
2520 ):
2514 fm_commits.startitem()
2521 fm_commits.startitem()
2515 fm_commits.data(name=name)
2522 fm_commits.data(name=name)
2516 fm_commits.data(node=hex(node))
2523 fm_commits.data(node=hex(node))
2517 if ms._labels and len(ms._labels) > label_index:
2524 if ms._labels and len(ms._labels) > label_index:
2518 fm_commits.data(label=ms._labels[label_index])
2525 fm_commits.data(label=ms._labels[label_index])
2519 fm_commits.end()
2526 fm_commits.end()
2520
2527
2521 fm_files = fm.nested(b'files')
2528 fm_files = fm.nested(b'files')
2522 if ms.active():
2529 if ms.active():
2523 for f in ms:
2530 for f in ms:
2524 fm_files.startitem()
2531 fm_files.startitem()
2525 fm_files.data(path=f)
2532 fm_files.data(path=f)
2526 state = ms._state[f]
2533 state = ms._state[f]
2527 fm_files.data(state=state[0])
2534 fm_files.data(state=state[0])
2528 if state[0] in (
2535 if state[0] in (
2529 mergestatemod.MERGE_RECORD_UNRESOLVED,
2536 mergestatemod.MERGE_RECORD_UNRESOLVED,
2530 mergestatemod.MERGE_RECORD_RESOLVED,
2537 mergestatemod.MERGE_RECORD_RESOLVED,
2531 ):
2538 ):
2532 fm_files.data(local_key=state[1])
2539 fm_files.data(local_key=state[1])
2533 fm_files.data(local_path=state[2])
2540 fm_files.data(local_path=state[2])
2534 fm_files.data(ancestor_path=state[3])
2541 fm_files.data(ancestor_path=state[3])
2535 fm_files.data(ancestor_node=state[4])
2542 fm_files.data(ancestor_node=state[4])
2536 fm_files.data(other_path=state[5])
2543 fm_files.data(other_path=state[5])
2537 fm_files.data(other_node=state[6])
2544 fm_files.data(other_node=state[6])
2538 fm_files.data(local_flags=state[7])
2545 fm_files.data(local_flags=state[7])
2539 elif state[0] in (
2546 elif state[0] in (
2540 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2547 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2541 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2548 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2542 ):
2549 ):
2543 fm_files.data(renamed_path=state[1])
2550 fm_files.data(renamed_path=state[1])
2544 fm_files.data(rename_side=state[2])
2551 fm_files.data(rename_side=state[2])
2545 fm_extras = fm_files.nested(b'extras')
2552 fm_extras = fm_files.nested(b'extras')
2546 for k, v in sorted(ms.extras(f).items()):
2553 for k, v in sorted(ms.extras(f).items()):
2547 fm_extras.startitem()
2554 fm_extras.startitem()
2548 fm_extras.data(key=k)
2555 fm_extras.data(key=k)
2549 fm_extras.data(value=v)
2556 fm_extras.data(value=v)
2550 fm_extras.end()
2557 fm_extras.end()
2551
2558
2552 fm_files.end()
2559 fm_files.end()
2553
2560
2554 fm_extras = fm.nested(b'extras')
2561 fm_extras = fm.nested(b'extras')
2555 for f, d in sorted(ms.allextras().items()):
2562 for f, d in sorted(ms.allextras().items()):
2556 if f in ms:
2563 if f in ms:
2557 # If file is in mergestate, we have already processed it's extras
2564 # If file is in mergestate, we have already processed it's extras
2558 continue
2565 continue
2559 for k, v in d.items():
2566 for k, v in d.items():
2560 fm_extras.startitem()
2567 fm_extras.startitem()
2561 fm_extras.data(file=f)
2568 fm_extras.data(file=f)
2562 fm_extras.data(key=k)
2569 fm_extras.data(key=k)
2563 fm_extras.data(value=v)
2570 fm_extras.data(value=v)
2564 fm_extras.end()
2571 fm_extras.end()
2565
2572
2566 fm.end()
2573 fm.end()
2567
2574
2568
2575
2569 @command(b'debugnamecomplete', [], _(b'NAME...'))
2576 @command(b'debugnamecomplete', [], _(b'NAME...'))
2570 def debugnamecomplete(ui, repo, *args):
2577 def debugnamecomplete(ui, repo, *args):
2571 '''complete "names" - tags, open branch names, bookmark names'''
2578 '''complete "names" - tags, open branch names, bookmark names'''
2572
2579
2573 names = set()
2580 names = set()
2574 # since we previously only listed open branches, we will handle that
2581 # since we previously only listed open branches, we will handle that
2575 # specially (after this for loop)
2582 # specially (after this for loop)
2576 for name, ns in repo.names.items():
2583 for name, ns in repo.names.items():
2577 if name != b'branches':
2584 if name != b'branches':
2578 names.update(ns.listnames(repo))
2585 names.update(ns.listnames(repo))
2579 names.update(
2586 names.update(
2580 tag
2587 tag
2581 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2588 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2582 if not closed
2589 if not closed
2583 )
2590 )
2584 completions = set()
2591 completions = set()
2585 if not args:
2592 if not args:
2586 args = [b'']
2593 args = [b'']
2587 for a in args:
2594 for a in args:
2588 completions.update(n for n in names if n.startswith(a))
2595 completions.update(n for n in names if n.startswith(a))
2589 ui.write(b'\n'.join(sorted(completions)))
2596 ui.write(b'\n'.join(sorted(completions)))
2590 ui.write(b'\n')
2597 ui.write(b'\n')
2591
2598
2592
2599
2593 @command(
2600 @command(
2594 b'debugnodemap',
2601 b'debugnodemap',
2595 [
2602 [
2596 (
2603 (
2597 b'',
2604 b'',
2598 b'dump-new',
2605 b'dump-new',
2599 False,
2606 False,
2600 _(b'write a (new) persistent binary nodemap on stdout'),
2607 _(b'write a (new) persistent binary nodemap on stdout'),
2601 ),
2608 ),
2602 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2609 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2603 (
2610 (
2604 b'',
2611 b'',
2605 b'check',
2612 b'check',
2606 False,
2613 False,
2607 _(b'check that the data on disk data are correct.'),
2614 _(b'check that the data on disk data are correct.'),
2608 ),
2615 ),
2609 (
2616 (
2610 b'',
2617 b'',
2611 b'metadata',
2618 b'metadata',
2612 False,
2619 False,
2613 _(b'display the on disk meta data for the nodemap'),
2620 _(b'display the on disk meta data for the nodemap'),
2614 ),
2621 ),
2615 ],
2622 ],
2616 )
2623 )
2617 def debugnodemap(ui, repo, **opts):
2624 def debugnodemap(ui, repo, **opts):
2618 """write and inspect on disk nodemap"""
2625 """write and inspect on disk nodemap"""
2619 if opts['dump_new']:
2626 if opts['dump_new']:
2620 unfi = repo.unfiltered()
2627 unfi = repo.unfiltered()
2621 cl = unfi.changelog
2628 cl = unfi.changelog
2622 if util.safehasattr(cl.index, "nodemap_data_all"):
2629 if util.safehasattr(cl.index, "nodemap_data_all"):
2623 data = cl.index.nodemap_data_all()
2630 data = cl.index.nodemap_data_all()
2624 else:
2631 else:
2625 data = nodemap.persistent_data(cl.index)
2632 data = nodemap.persistent_data(cl.index)
2626 ui.write(data)
2633 ui.write(data)
2627 elif opts['dump_disk']:
2634 elif opts['dump_disk']:
2628 unfi = repo.unfiltered()
2635 unfi = repo.unfiltered()
2629 cl = unfi.changelog
2636 cl = unfi.changelog
2630 nm_data = nodemap.persisted_data(cl)
2637 nm_data = nodemap.persisted_data(cl)
2631 if nm_data is not None:
2638 if nm_data is not None:
2632 docket, data = nm_data
2639 docket, data = nm_data
2633 ui.write(data[:])
2640 ui.write(data[:])
2634 elif opts['check']:
2641 elif opts['check']:
2635 unfi = repo.unfiltered()
2642 unfi = repo.unfiltered()
2636 cl = unfi.changelog
2643 cl = unfi.changelog
2637 nm_data = nodemap.persisted_data(cl)
2644 nm_data = nodemap.persisted_data(cl)
2638 if nm_data is not None:
2645 if nm_data is not None:
2639 docket, data = nm_data
2646 docket, data = nm_data
2640 return nodemap.check_data(ui, cl.index, data)
2647 return nodemap.check_data(ui, cl.index, data)
2641 elif opts['metadata']:
2648 elif opts['metadata']:
2642 unfi = repo.unfiltered()
2649 unfi = repo.unfiltered()
2643 cl = unfi.changelog
2650 cl = unfi.changelog
2644 nm_data = nodemap.persisted_data(cl)
2651 nm_data = nodemap.persisted_data(cl)
2645 if nm_data is not None:
2652 if nm_data is not None:
2646 docket, data = nm_data
2653 docket, data = nm_data
2647 ui.write((b"uid: %s\n") % docket.uid)
2654 ui.write((b"uid: %s\n") % docket.uid)
2648 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2655 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2649 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2656 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2650 ui.write((b"data-length: %d\n") % docket.data_length)
2657 ui.write((b"data-length: %d\n") % docket.data_length)
2651 ui.write((b"data-unused: %d\n") % docket.data_unused)
2658 ui.write((b"data-unused: %d\n") % docket.data_unused)
2652 unused_perc = docket.data_unused * 100.0 / docket.data_length
2659 unused_perc = docket.data_unused * 100.0 / docket.data_length
2653 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2660 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654
2661
2655
2662
2656 @command(
2663 @command(
2657 b'debugobsolete',
2664 b'debugobsolete',
2658 [
2665 [
2659 (b'', b'flags', 0, _(b'markers flag')),
2666 (b'', b'flags', 0, _(b'markers flag')),
2660 (
2667 (
2661 b'',
2668 b'',
2662 b'record-parents',
2669 b'record-parents',
2663 False,
2670 False,
2664 _(b'record parent information for the precursor'),
2671 _(b'record parent information for the precursor'),
2665 ),
2672 ),
2666 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2673 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2667 (
2674 (
2668 b'',
2675 b'',
2669 b'exclusive',
2676 b'exclusive',
2670 False,
2677 False,
2671 _(b'restrict display to markers only relevant to REV'),
2678 _(b'restrict display to markers only relevant to REV'),
2672 ),
2679 ),
2673 (b'', b'index', False, _(b'display index of the marker')),
2680 (b'', b'index', False, _(b'display index of the marker')),
2674 (b'', b'delete', [], _(b'delete markers specified by indices')),
2681 (b'', b'delete', [], _(b'delete markers specified by indices')),
2675 ]
2682 ]
2676 + cmdutil.commitopts2
2683 + cmdutil.commitopts2
2677 + cmdutil.formatteropts,
2684 + cmdutil.formatteropts,
2678 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2685 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2679 )
2686 )
2680 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2681 """create arbitrary obsolete marker
2688 """create arbitrary obsolete marker
2682
2689
2683 With no arguments, displays the list of obsolescence markers."""
2690 With no arguments, displays the list of obsolescence markers."""
2684
2691
2685 opts = pycompat.byteskwargs(opts)
2692 opts = pycompat.byteskwargs(opts)
2686
2693
2687 def parsenodeid(s):
2694 def parsenodeid(s):
2688 try:
2695 try:
2689 # We do not use revsingle/revrange functions here to accept
2696 # We do not use revsingle/revrange functions here to accept
2690 # arbitrary node identifiers, possibly not present in the
2697 # arbitrary node identifiers, possibly not present in the
2691 # local repository.
2698 # local repository.
2692 n = bin(s)
2699 n = bin(s)
2693 if len(n) != repo.nodeconstants.nodelen:
2700 if len(n) != repo.nodeconstants.nodelen:
2694 raise ValueError
2701 raise ValueError
2695 return n
2702 return n
2696 except ValueError:
2703 except ValueError:
2697 raise error.InputError(
2704 raise error.InputError(
2698 b'changeset references must be full hexadecimal '
2705 b'changeset references must be full hexadecimal '
2699 b'node identifiers'
2706 b'node identifiers'
2700 )
2707 )
2701
2708
2702 if opts.get(b'delete'):
2709 if opts.get(b'delete'):
2703 indices = []
2710 indices = []
2704 for v in opts.get(b'delete'):
2711 for v in opts.get(b'delete'):
2705 try:
2712 try:
2706 indices.append(int(v))
2713 indices.append(int(v))
2707 except ValueError:
2714 except ValueError:
2708 raise error.InputError(
2715 raise error.InputError(
2709 _(b'invalid index value: %r') % v,
2716 _(b'invalid index value: %r') % v,
2710 hint=_(b'use integers for indices'),
2717 hint=_(b'use integers for indices'),
2711 )
2718 )
2712
2719
2713 if repo.currenttransaction():
2720 if repo.currenttransaction():
2714 raise error.Abort(
2721 raise error.Abort(
2715 _(b'cannot delete obsmarkers in the middle of transaction.')
2722 _(b'cannot delete obsmarkers in the middle of transaction.')
2716 )
2723 )
2717
2724
2718 with repo.lock():
2725 with repo.lock():
2719 n = repair.deleteobsmarkers(repo.obsstore, indices)
2726 n = repair.deleteobsmarkers(repo.obsstore, indices)
2720 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2727 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2721
2728
2722 return
2729 return
2723
2730
2724 if precursor is not None:
2731 if precursor is not None:
2725 if opts[b'rev']:
2732 if opts[b'rev']:
2726 raise error.InputError(
2733 raise error.InputError(
2727 b'cannot select revision when creating marker'
2734 b'cannot select revision when creating marker'
2728 )
2735 )
2729 metadata = {}
2736 metadata = {}
2730 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2737 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2731 succs = tuple(parsenodeid(succ) for succ in successors)
2738 succs = tuple(parsenodeid(succ) for succ in successors)
2732 l = repo.lock()
2739 l = repo.lock()
2733 try:
2740 try:
2734 tr = repo.transaction(b'debugobsolete')
2741 tr = repo.transaction(b'debugobsolete')
2735 try:
2742 try:
2736 date = opts.get(b'date')
2743 date = opts.get(b'date')
2737 if date:
2744 if date:
2738 date = dateutil.parsedate(date)
2745 date = dateutil.parsedate(date)
2739 else:
2746 else:
2740 date = None
2747 date = None
2741 prec = parsenodeid(precursor)
2748 prec = parsenodeid(precursor)
2742 parents = None
2749 parents = None
2743 if opts[b'record_parents']:
2750 if opts[b'record_parents']:
2744 if prec not in repo.unfiltered():
2751 if prec not in repo.unfiltered():
2745 raise error.Abort(
2752 raise error.Abort(
2746 b'cannot used --record-parents on '
2753 b'cannot used --record-parents on '
2747 b'unknown changesets'
2754 b'unknown changesets'
2748 )
2755 )
2749 parents = repo.unfiltered()[prec].parents()
2756 parents = repo.unfiltered()[prec].parents()
2750 parents = tuple(p.node() for p in parents)
2757 parents = tuple(p.node() for p in parents)
2751 repo.obsstore.create(
2758 repo.obsstore.create(
2752 tr,
2759 tr,
2753 prec,
2760 prec,
2754 succs,
2761 succs,
2755 opts[b'flags'],
2762 opts[b'flags'],
2756 parents=parents,
2763 parents=parents,
2757 date=date,
2764 date=date,
2758 metadata=metadata,
2765 metadata=metadata,
2759 ui=ui,
2766 ui=ui,
2760 )
2767 )
2761 tr.close()
2768 tr.close()
2762 except ValueError as exc:
2769 except ValueError as exc:
2763 raise error.Abort(
2770 raise error.Abort(
2764 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2771 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2765 )
2772 )
2766 finally:
2773 finally:
2767 tr.release()
2774 tr.release()
2768 finally:
2775 finally:
2769 l.release()
2776 l.release()
2770 else:
2777 else:
2771 if opts[b'rev']:
2778 if opts[b'rev']:
2772 revs = logcmdutil.revrange(repo, opts[b'rev'])
2779 revs = logcmdutil.revrange(repo, opts[b'rev'])
2773 nodes = [repo[r].node() for r in revs]
2780 nodes = [repo[r].node() for r in revs]
2774 markers = list(
2781 markers = list(
2775 obsutil.getmarkers(
2782 obsutil.getmarkers(
2776 repo, nodes=nodes, exclusive=opts[b'exclusive']
2783 repo, nodes=nodes, exclusive=opts[b'exclusive']
2777 )
2784 )
2778 )
2785 )
2779 markers.sort(key=lambda x: x._data)
2786 markers.sort(key=lambda x: x._data)
2780 else:
2787 else:
2781 markers = obsutil.getmarkers(repo)
2788 markers = obsutil.getmarkers(repo)
2782
2789
2783 markerstoiter = markers
2790 markerstoiter = markers
2784 isrelevant = lambda m: True
2791 isrelevant = lambda m: True
2785 if opts.get(b'rev') and opts.get(b'index'):
2792 if opts.get(b'rev') and opts.get(b'index'):
2786 markerstoiter = obsutil.getmarkers(repo)
2793 markerstoiter = obsutil.getmarkers(repo)
2787 markerset = set(markers)
2794 markerset = set(markers)
2788 isrelevant = lambda m: m in markerset
2795 isrelevant = lambda m: m in markerset
2789
2796
2790 fm = ui.formatter(b'debugobsolete', opts)
2797 fm = ui.formatter(b'debugobsolete', opts)
2791 for i, m in enumerate(markerstoiter):
2798 for i, m in enumerate(markerstoiter):
2792 if not isrelevant(m):
2799 if not isrelevant(m):
2793 # marker can be irrelevant when we're iterating over a set
2800 # marker can be irrelevant when we're iterating over a set
2794 # of markers (markerstoiter) which is bigger than the set
2801 # of markers (markerstoiter) which is bigger than the set
2795 # of markers we want to display (markers)
2802 # of markers we want to display (markers)
2796 # this can happen if both --index and --rev options are
2803 # this can happen if both --index and --rev options are
2797 # provided and thus we need to iterate over all of the markers
2804 # provided and thus we need to iterate over all of the markers
2798 # to get the correct indices, but only display the ones that
2805 # to get the correct indices, but only display the ones that
2799 # are relevant to --rev value
2806 # are relevant to --rev value
2800 continue
2807 continue
2801 fm.startitem()
2808 fm.startitem()
2802 ind = i if opts.get(b'index') else None
2809 ind = i if opts.get(b'index') else None
2803 cmdutil.showmarker(fm, m, index=ind)
2810 cmdutil.showmarker(fm, m, index=ind)
2804 fm.end()
2811 fm.end()
2805
2812
2806
2813
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the target revision (working copy parent when --rev is absent).
    rev = opts.get(b'rev')
    ctx = scmutil.revsingle(repo, rev, default=None)
    copy_map = ctx.p1copies()
    for destination, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2819
2826
2820
2827
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the target revision (working copy parent when --rev is absent).
    rev = opts.get(b'rev')
    ctx = scmutil.revsingle(repo, rev, default=None)
    copy_map = ctx.p2copies()
    for destination, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2833
2840
2834
2841
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate entries matching `path` as a
        # prefix, filtered to the dirstate states listed in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Paths outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative for comparison with dirstate paths.
        spec = spec[len(rootdir) :]
        # Dirstate paths always use b'/'; convert the spec on platforms
        # with a different separator (and convert matches back below).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop visits every dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead of the whole path.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the -n/-a/-r flags;
    # an empty string means "no filter" and falls back to b'nmar' below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Emit files and directory prefixes together, sorted, one per line.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903
2910
2904
2911
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then report each copy pair in sorted order.
    old_ctx = scmutil.revsingle(repo, rev1)
    new_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old_ctx, pats, opts)
    copy_map = copies.pathcopies(old_ctx, new_ctx, matcher)
    for destination, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2918
2925
2919
2926
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        def yesno(flag):
            # Translate a boolean into the localized yes/no string.
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(can_push))
    finally:
        # Always release the peer connection, even on error.
        peer.close()
2943
2950
2944
2951
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes precedence; apply it through ui.forcemerge so the
    # regular tool-selection machinery sees it.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the inputs that can decide the tool before pattern
        # matching (items 1, 2 and 4 of the order documented above).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool's own messages unless --debug is set, so
            # that by default only the "FILE = MERGETOOL" lines appear.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029
3036
3030
3037
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: conditionally set `key` from `old` to `new`.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Exit status 0 on success (truthy result), 1 otherwise.
            return not result
    finally:
        target.close()
3066
3073
3067
3074
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs of two revisions (debug aid)

    Prints both pvecs, their depths, and the delta/hamming/distance
    figures along with the relation between them (=, >, <, or |).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # The four relations above are meant to be exhaustive, but the
        # original code left `rel` unbound (NameError below) if the pvec
        # comparison ever reported none of them.  Fall back to a visible
        # placeholder instead of crashing.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094
3101
3095
3102
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding while a transaction is open would conflict with the
        # transaction's own dirstate handling, so refuse outright.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files present in the manifest but unknown to the dirstate.
            manifestonly = manifestfiles - dirstatefiles
            # Dirstate-only files, excluding those in the "added" state,
            # which --minimal must leave untouched.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3147
3154
3148
3155
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to .d files.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3165
3172
3166
3173
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (old path, old filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            oldpath, oldnode = renamed[0], renamed[1]
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, oldpath, hex(oldnode))
            )
3186
3193
3187
3194
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3193
3200
3194
3201
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    revlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump prints raw index data; otherwise emit the statistics report.
    if not opts.get(b"dump"):
        revlog_debug.debug_revlog(ui, revlog)
    else:
        revlog_debug.dump(ui, revlog)
    return 0
3211
3218
3212
3219
3213 @command(
3220 @command(
3214 b'debugrevlogindex',
3221 b'debugrevlogindex',
3215 cmdutil.debugrevlogopts
3222 cmdutil.debugrevlogopts
3216 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3223 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3217 _(b'[-f FORMAT] -c|-m|FILE'),
3224 _(b'[-f FORMAT] -c|-m|FILE'),
3218 optionalrepo=True,
3225 optionalrepo=True,
3219 )
3226 )
3220 def debugrevlogindex(ui, repo, file_=None, **opts):
3227 def debugrevlogindex(ui, repo, file_=None, **opts):
3221 """dump the contents of a revlog index"""
3228 """dump the contents of a revlog index"""
3222 opts = pycompat.byteskwargs(opts)
3229 opts = pycompat.byteskwargs(opts)
3223 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3230 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3224 format = opts.get(b'format', 0)
3231 format = opts.get(b'format', 0)
3225 if format not in (0, 1):
3232 if format not in (0, 1):
3226 raise error.Abort(_(b"unknown format %d") % format)
3233 raise error.Abort(_(b"unknown format %d") % format)
3227
3234
3228 if ui.debugflag:
3235 if ui.debugflag:
3229 shortfn = hex
3236 shortfn = hex
3230 else:
3237 else:
3231 shortfn = short
3238 shortfn = short
3232
3239
3233 # There might not be anything in r, so have a sane default
3240 # There might not be anything in r, so have a sane default
3234 idlen = 12
3241 idlen = 12
3235 for i in r:
3242 for i in r:
3236 idlen = len(shortfn(r.node(i)))
3243 idlen = len(shortfn(r.node(i)))
3237 break
3244 break
3238
3245
3239 if format == 0:
3246 if format == 0:
3240 if ui.verbose:
3247 if ui.verbose:
3241 ui.writenoi18n(
3248 ui.writenoi18n(
3242 b" rev offset length linkrev %s %s p2\n"
3249 b" rev offset length linkrev %s %s p2\n"
3243 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3250 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3244 )
3251 )
3245 else:
3252 else:
3246 ui.writenoi18n(
3253 ui.writenoi18n(
3247 b" rev linkrev %s %s p2\n"
3254 b" rev linkrev %s %s p2\n"
3248 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3255 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3249 )
3256 )
3250 elif format == 1:
3257 elif format == 1:
3251 if ui.verbose:
3258 if ui.verbose:
3252 ui.writenoi18n(
3259 ui.writenoi18n(
3253 (
3260 (
3254 b" rev flag offset length size link p1"
3261 b" rev flag offset length size link p1"
3255 b" p2 %s\n"
3262 b" p2 %s\n"
3256 )
3263 )
3257 % b"nodeid".rjust(idlen)
3264 % b"nodeid".rjust(idlen)
3258 )
3265 )
3259 else:
3266 else:
3260 ui.writenoi18n(
3267 ui.writenoi18n(
3261 b" rev flag size link p1 p2 %s\n"
3268 b" rev flag size link p1 p2 %s\n"
3262 % b"nodeid".rjust(idlen)
3269 % b"nodeid".rjust(idlen)
3263 )
3270 )
3264
3271
3265 for i in r:
3272 for i in r:
3266 node = r.node(i)
3273 node = r.node(i)
3267 if format == 0:
3274 if format == 0:
3268 try:
3275 try:
3269 pp = r.parents(node)
3276 pp = r.parents(node)
3270 except Exception:
3277 except Exception:
3271 pp = [repo.nullid, repo.nullid]
3278 pp = [repo.nullid, repo.nullid]
3272 if ui.verbose:
3279 if ui.verbose:
3273 ui.write(
3280 ui.write(
3274 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3281 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3275 % (
3282 % (
3276 i,
3283 i,
3277 r.start(i),
3284 r.start(i),
3278 r.length(i),
3285 r.length(i),
3279 r.linkrev(i),
3286 r.linkrev(i),
3280 shortfn(node),
3287 shortfn(node),
3281 shortfn(pp[0]),
3288 shortfn(pp[0]),
3282 shortfn(pp[1]),
3289 shortfn(pp[1]),
3283 )
3290 )
3284 )
3291 )
3285 else:
3292 else:
3286 ui.write(
3293 ui.write(
3287 b"% 6d % 7d %s %s %s\n"
3294 b"% 6d % 7d %s %s %s\n"
3288 % (
3295 % (
3289 i,
3296 i,
3290 r.linkrev(i),
3297 r.linkrev(i),
3291 shortfn(node),
3298 shortfn(node),
3292 shortfn(pp[0]),
3299 shortfn(pp[0]),
3293 shortfn(pp[1]),
3300 shortfn(pp[1]),
3294 )
3301 )
3295 )
3302 )
3296 elif format == 1:
3303 elif format == 1:
3297 pr = r.parentrevs(i)
3304 pr = r.parentrevs(i)
3298 if ui.verbose:
3305 if ui.verbose:
3299 ui.write(
3306 ui.write(
3300 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3307 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3301 % (
3308 % (
3302 i,
3309 i,
3303 r.flags(i),
3310 r.flags(i),
3304 r.start(i),
3311 r.start(i),
3305 r.length(i),
3312 r.length(i),
3306 r.rawsize(i),
3313 r.rawsize(i),
3307 r.linkrev(i),
3314 r.linkrev(i),
3308 pr[0],
3315 pr[0],
3309 pr[1],
3316 pr[1],
3310 shortfn(node),
3317 shortfn(node),
3311 )
3318 )
3312 )
3319 )
3313 else:
3320 else:
3314 ui.write(
3321 ui.write(
3315 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3322 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3316 % (
3323 % (
3317 i,
3324 i,
3318 r.flags(i),
3325 r.flags(i),
3319 r.rawsize(i),
3326 r.rawsize(i),
3320 r.linkrev(i),
3327 r.linkrev(i),
3321 pr[0],
3328 pr[0],
3322 pr[1],
3329 pr[1],
3323 shortfn(node),
3330 shortfn(node),
3324 )
3331 )
3325 )
3332 )
3326
3333
3327
3334
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of revset compilation stages: each entry is (name, transform).
    # The tree is threaded through them in order, so later stages see the
    # output of earlier ones.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stage names to print unconditionally vs. only when the tree changed
    # relative to the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run every stage, remembering each intermediate tree so that
    # --verify-optimized can re-evaluate from 'analyzed' and 'optimized'.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the pre- and post-optimization trees and diff the
        # resulting revision sequences; any divergence is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the difference as a unified-diff-style listing of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the fully transformed tree and print the result.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3459
3466
3460
3467
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    # Optional destination for logging server I/O traffic.
    log_handle = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            log_handle = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            log_handle = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        log_handle = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=log_handle)
    server.serve_forever()
3509
3516
3510
3517
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # p2 defaults to the null revision when REV2 is omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3538
3545
3539
3546
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not a
    # file path.
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Drill down to the underlying revlog when the storage object wraps one.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3566
3573
3567
3574
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an explicit context with the same behavior: negotiate any TLS
    # version and perform no certificate verification, since we only want
    # the peer's raw certificate to hand to the Windows chain builder.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded certificate of the peer (binary_form=True).
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building: is the chain already complete?
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call (build=True by default) asks Windows to fetch the
            # missing intermediates/root via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3637
3644
3638
3645
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, most recently modified first, so a
    # --recover scan finds the freshest copy of a changeset first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize log options that getremotechanges would otherwise honor.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            # Open the bundle file as a peer repository.
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            # The bundle references a parent revision missing from the local
            # repo; warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style comparison chatter.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: unbundle the first backup containing the
                # requested changeset, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            # Legacy (bundle1) changegroup.
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: print a timestamp header per bundle, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always tear down the temporary bundle repo state.
            cleanupfn()
3778
3785
3779
3786
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the path, source, and pinned revision of every subrepository
    # recorded in the substate of the given revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3791
3798
3792
3799
3793 @command(
3800 @command(
3794 b'debugshell',
3801 b'debugshell',
3795 [
3802 [
3796 (
3803 (
3797 b'c',
3804 b'c',
3798 b'command',
3805 b'command',
3799 b'',
3806 b'',
3800 _(b'program passed in as a string'),
3807 _(b'program passed in as a string'),
3801 _(b'COMMAND'),
3808 _(b'COMMAND'),
3802 )
3809 )
3803 ],
3810 ],
3804 _(b'[-c COMMAND]'),
3811 _(b'[-c COMMAND]'),
3805 optionalrepo=True,
3812 optionalrepo=True,
3806 )
3813 )
3807 def debugshell(ui, repo, **opts):
3814 def debugshell(ui, repo, **opts):
3808 """run an interactive Python interpreter
3815 """run an interactive Python interpreter
3809
3816
3810 The local namespace is provided with a reference to the ui and
3817 The local namespace is provided with a reference to the ui and
3811 the repo instance (if available).
3818 the repo instance (if available).
3812 """
3819 """
3813 import code
3820 import code
3814
3821
3815 imported_objects = {
3822 imported_objects = {
3816 'ui': ui,
3823 'ui': ui,
3817 'repo': repo,
3824 'repo': repo,
3818 }
3825 }
3819
3826
3820 # py2exe disables initialization of the site module, which is responsible
3827 # py2exe disables initialization of the site module, which is responsible
3821 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3828 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3822 # the stuff that site normally does here, so that the interpreter can be
3829 # the stuff that site normally does here, so that the interpreter can be
3823 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3830 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3824 # py.exe, or py2exe.
3831 # py.exe, or py2exe.
3825 if getattr(sys, "frozen", None) == 'console_exe':
3832 if getattr(sys, "frozen", None) == 'console_exe':
3826 try:
3833 try:
3827 import site
3834 import site
3828
3835
3829 site.setcopyright()
3836 site.setcopyright()
3830 site.sethelper()
3837 site.sethelper()
3831 site.setquit()
3838 site.setquit()
3832 except ImportError:
3839 except ImportError:
3833 site = None # Keep PyCharm happy
3840 site = None # Keep PyCharm happy
3834
3841
3835 command = opts.get('command')
3842 command = opts.get('command')
3836 if command:
3843 if command:
3837 compiled = code.compile_command(encoding.strfromlocal(command))
3844 compiled = code.compile_command(encoding.strfromlocal(command))
3838 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3845 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3839 return
3846 return
3840
3847
3841 code.interact(local=imported_objects)
3848 code.interact(local=imported_objects)
3842
3849
3843
3850
3844 @command(
3851 @command(
3845 b'debug-revlog-stats',
3852 b'debug-revlog-stats',
3846 [
3853 [
3847 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3854 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3848 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3855 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3849 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3856 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3850 ]
3857 ]
3851 + cmdutil.formatteropts,
3858 + cmdutil.formatteropts,
3852 )
3859 )
3853 def debug_revlog_stats(ui, repo, **opts):
3860 def debug_revlog_stats(ui, repo, **opts):
3854 """display statistics about revlogs in the store"""
3861 """display statistics about revlogs in the store"""
3855 opts = pycompat.byteskwargs(opts)
3862 opts = pycompat.byteskwargs(opts)
3856 changelog = opts[b"changelog"]
3863 changelog = opts[b"changelog"]
3857 manifest = opts[b"manifest"]
3864 manifest = opts[b"manifest"]
3858 filelogs = opts[b"filelogs"]
3865 filelogs = opts[b"filelogs"]
3859
3866
3860 if changelog is None and manifest is None and filelogs is None:
3867 if changelog is None and manifest is None and filelogs is None:
3861 changelog = True
3868 changelog = True
3862 manifest = True
3869 manifest = True
3863 filelogs = True
3870 filelogs = True
3864
3871
3865 repo = repo.unfiltered()
3872 repo = repo.unfiltered()
3866 fm = ui.formatter(b'debug-revlog-stats', opts)
3873 fm = ui.formatter(b'debug-revlog-stats', opts)
3867 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3874 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3868 fm.end()
3875 fm.end()
3869
3876
3870
3877
3871 @command(
3878 @command(
3872 b'debugsuccessorssets',
3879 b'debugsuccessorssets',
3873 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3880 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3874 _(b'[REV]'),
3881 _(b'[REV]'),
3875 )
3882 )
3876 def debugsuccessorssets(ui, repo, *revs, **opts):
3883 def debugsuccessorssets(ui, repo, *revs, **opts):
3877 """show set of successors for revision
3884 """show set of successors for revision
3878
3885
3879 A successors set of changeset A is a consistent group of revisions that
3886 A successors set of changeset A is a consistent group of revisions that
3880 succeed A. It contains non-obsolete changesets only unless closests
3887 succeed A. It contains non-obsolete changesets only unless closests
3881 successors set is set.
3888 successors set is set.
3882
3889
3883 In most cases a changeset A has a single successors set containing a single
3890 In most cases a changeset A has a single successors set containing a single
3884 successor (changeset A replaced by A').
3891 successor (changeset A replaced by A').
3885
3892
3886 A changeset that is made obsolete with no successors are called "pruned".
3893 A changeset that is made obsolete with no successors are called "pruned".
3887 Such changesets have no successors sets at all.
3894 Such changesets have no successors sets at all.
3888
3895
3889 A changeset that has been "split" will have a successors set containing
3896 A changeset that has been "split" will have a successors set containing
3890 more than one successor.
3897 more than one successor.
3891
3898
3892 A changeset that has been rewritten in multiple different ways is called
3899 A changeset that has been rewritten in multiple different ways is called
3893 "divergent". Such changesets have multiple successor sets (each of which
3900 "divergent". Such changesets have multiple successor sets (each of which
3894 may also be split, i.e. have multiple successors).
3901 may also be split, i.e. have multiple successors).
3895
3902
3896 Results are displayed as follows::
3903 Results are displayed as follows::
3897
3904
3898 <rev1>
3905 <rev1>
3899 <successors-1A>
3906 <successors-1A>
3900 <rev2>
3907 <rev2>
3901 <successors-2A>
3908 <successors-2A>
3902 <successors-2B1> <successors-2B2> <successors-2B3>
3909 <successors-2B1> <successors-2B2> <successors-2B3>
3903
3910
3904 Here rev2 has two possible (i.e. divergent) successors sets. The first
3911 Here rev2 has two possible (i.e. divergent) successors sets. The first
3905 holds one element, whereas the second holds three (i.e. the changeset has
3912 holds one element, whereas the second holds three (i.e. the changeset has
3906 been split).
3913 been split).
3907 """
3914 """
3908 # passed to successorssets caching computation from one call to another
3915 # passed to successorssets caching computation from one call to another
3909 cache = {}
3916 cache = {}
3910 ctx2str = bytes
3917 ctx2str = bytes
3911 node2str = short
3918 node2str = short
3912 for rev in logcmdutil.revrange(repo, revs):
3919 for rev in logcmdutil.revrange(repo, revs):
3913 ctx = repo[rev]
3920 ctx = repo[rev]
3914 ui.write(b'%s\n' % ctx2str(ctx))
3921 ui.write(b'%s\n' % ctx2str(ctx))
3915 for succsset in obsutil.successorssets(
3922 for succsset in obsutil.successorssets(
3916 repo, ctx.node(), closest=opts['closest'], cache=cache
3923 repo, ctx.node(), closest=opts['closest'], cache=cache
3917 ):
3924 ):
3918 if succsset:
3925 if succsset:
3919 ui.write(b' ')
3926 ui.write(b' ')
3920 ui.write(node2str(succsset[0]))
3927 ui.write(node2str(succsset[0]))
3921 for node in succsset[1:]:
3928 for node in succsset[1:]:
3922 ui.write(b' ')
3929 ui.write(b' ')
3923 ui.write(node2str(node))
3930 ui.write(node2str(node))
3924 ui.write(b'\n')
3931 ui.write(b'\n')
3925
3932
3926
3933
3927 @command(b'debugtagscache', [])
3934 @command(b'debugtagscache', [])
3928 def debugtagscache(ui, repo):
3935 def debugtagscache(ui, repo):
3929 """display the contents of .hg/cache/hgtagsfnodes1"""
3936 """display the contents of .hg/cache/hgtagsfnodes1"""
3930 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3937 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3931 flog = repo.file(b'.hgtags')
3938 flog = repo.file(b'.hgtags')
3932 for r in repo:
3939 for r in repo:
3933 node = repo[r].node()
3940 node = repo[r].node()
3934 tagsnode = cache.getfnode(node, computemissing=False)
3941 tagsnode = cache.getfnode(node, computemissing=False)
3935 if tagsnode:
3942 if tagsnode:
3936 tagsnodedisplay = hex(tagsnode)
3943 tagsnodedisplay = hex(tagsnode)
3937 if not flog.hasnode(tagsnode):
3944 if not flog.hasnode(tagsnode):
3938 tagsnodedisplay += b' (unknown node)'
3945 tagsnodedisplay += b' (unknown node)'
3939 elif tagsnode is None:
3946 elif tagsnode is None:
3940 tagsnodedisplay = b'missing'
3947 tagsnodedisplay = b'missing'
3941 else:
3948 else:
3942 tagsnodedisplay = b'invalid'
3949 tagsnodedisplay = b'invalid'
3943
3950
3944 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3951 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3945
3952
3946
3953
3947 @command(
3954 @command(
3948 b'debugtemplate',
3955 b'debugtemplate',
3949 [
3956 [
3950 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3957 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3951 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3958 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3952 ],
3959 ],
3953 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3960 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3954 optionalrepo=True,
3961 optionalrepo=True,
3955 )
3962 )
3956 def debugtemplate(ui, repo, tmpl, **opts):
3963 def debugtemplate(ui, repo, tmpl, **opts):
3957 """parse and apply a template
3964 """parse and apply a template
3958
3965
3959 If -r/--rev is given, the template is processed as a log template and
3966 If -r/--rev is given, the template is processed as a log template and
3960 applied to the given changesets. Otherwise, it is processed as a generic
3967 applied to the given changesets. Otherwise, it is processed as a generic
3961 template.
3968 template.
3962
3969
3963 Use --verbose to print the parsed tree.
3970 Use --verbose to print the parsed tree.
3964 """
3971 """
3965 revs = None
3972 revs = None
3966 if opts['rev']:
3973 if opts['rev']:
3967 if repo is None:
3974 if repo is None:
3968 raise error.RepoError(
3975 raise error.RepoError(
3969 _(b'there is no Mercurial repository here (.hg not found)')
3976 _(b'there is no Mercurial repository here (.hg not found)')
3970 )
3977 )
3971 revs = logcmdutil.revrange(repo, opts['rev'])
3978 revs = logcmdutil.revrange(repo, opts['rev'])
3972
3979
3973 props = {}
3980 props = {}
3974 for d in opts['define']:
3981 for d in opts['define']:
3975 try:
3982 try:
3976 k, v = (e.strip() for e in d.split(b'=', 1))
3983 k, v = (e.strip() for e in d.split(b'=', 1))
3977 if not k or k == b'ui':
3984 if not k or k == b'ui':
3978 raise ValueError
3985 raise ValueError
3979 props[k] = v
3986 props[k] = v
3980 except ValueError:
3987 except ValueError:
3981 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3988 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3982
3989
3983 if ui.verbose:
3990 if ui.verbose:
3984 aliases = ui.configitems(b'templatealias')
3991 aliases = ui.configitems(b'templatealias')
3985 tree = templater.parse(tmpl)
3992 tree = templater.parse(tmpl)
3986 ui.note(templater.prettyformat(tree), b'\n')
3993 ui.note(templater.prettyformat(tree), b'\n')
3987 newtree = templater.expandaliases(tree, aliases)
3994 newtree = templater.expandaliases(tree, aliases)
3988 if newtree != tree:
3995 if newtree != tree:
3989 ui.notenoi18n(
3996 ui.notenoi18n(
3990 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3997 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3991 )
3998 )
3992
3999
3993 if revs is None:
4000 if revs is None:
3994 tres = formatter.templateresources(ui, repo)
4001 tres = formatter.templateresources(ui, repo)
3995 t = formatter.maketemplater(ui, tmpl, resources=tres)
4002 t = formatter.maketemplater(ui, tmpl, resources=tres)
3996 if ui.verbose:
4003 if ui.verbose:
3997 kwds, funcs = t.symbolsuseddefault()
4004 kwds, funcs = t.symbolsuseddefault()
3998 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4005 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3999 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4006 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4000 ui.write(t.renderdefault(props))
4007 ui.write(t.renderdefault(props))
4001 else:
4008 else:
4002 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4009 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4003 if ui.verbose:
4010 if ui.verbose:
4004 kwds, funcs = displayer.t.symbolsuseddefault()
4011 kwds, funcs = displayer.t.symbolsuseddefault()
4005 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4012 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4006 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4013 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4007 for r in revs:
4014 for r in revs:
4008 displayer.show(repo[r], **pycompat.strkwargs(props))
4015 displayer.show(repo[r], **pycompat.strkwargs(props))
4009 displayer.close()
4016 displayer.close()
4010
4017
4011
4018
4012 @command(
4019 @command(
4013 b'debuguigetpass',
4020 b'debuguigetpass',
4014 [
4021 [
4015 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4022 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4016 ],
4023 ],
4017 _(b'[-p TEXT]'),
4024 _(b'[-p TEXT]'),
4018 norepo=True,
4025 norepo=True,
4019 )
4026 )
4020 def debuguigetpass(ui, prompt=b''):
4027 def debuguigetpass(ui, prompt=b''):
4021 """show prompt to type password"""
4028 """show prompt to type password"""
4022 r = ui.getpass(prompt)
4029 r = ui.getpass(prompt)
4023 if r is None:
4030 if r is None:
4024 r = b"<default response>"
4031 r = b"<default response>"
4025 ui.writenoi18n(b'response: %s\n' % r)
4032 ui.writenoi18n(b'response: %s\n' % r)
4026
4033
4027
4034
4028 @command(
4035 @command(
4029 b'debuguiprompt',
4036 b'debuguiprompt',
4030 [
4037 [
4031 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4038 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4032 ],
4039 ],
4033 _(b'[-p TEXT]'),
4040 _(b'[-p TEXT]'),
4034 norepo=True,
4041 norepo=True,
4035 )
4042 )
4036 def debuguiprompt(ui, prompt=b''):
4043 def debuguiprompt(ui, prompt=b''):
4037 """show plain prompt"""
4044 """show plain prompt"""
4038 r = ui.prompt(prompt)
4045 r = ui.prompt(prompt)
4039 ui.writenoi18n(b'response: %s\n' % r)
4046 ui.writenoi18n(b'response: %s\n' % r)
4040
4047
4041
4048
4042 @command(b'debugupdatecaches', [])
4049 @command(b'debugupdatecaches', [])
4043 def debugupdatecaches(ui, repo, *pats, **opts):
4050 def debugupdatecaches(ui, repo, *pats, **opts):
4044 """warm all known caches in the repository"""
4051 """warm all known caches in the repository"""
4045 with repo.wlock(), repo.lock():
4052 with repo.wlock(), repo.lock():
4046 repo.updatecaches(caches=repository.CACHES_ALL)
4053 repo.updatecaches(caches=repository.CACHES_ALL)
4047
4054
4048
4055
4049 @command(
4056 @command(
4050 b'debugupgraderepo',
4057 b'debugupgraderepo',
4051 [
4058 [
4052 (
4059 (
4053 b'o',
4060 b'o',
4054 b'optimize',
4061 b'optimize',
4055 [],
4062 [],
4056 _(b'extra optimization to perform'),
4063 _(b'extra optimization to perform'),
4057 _(b'NAME'),
4064 _(b'NAME'),
4058 ),
4065 ),
4059 (b'', b'run', False, _(b'performs an upgrade')),
4066 (b'', b'run', False, _(b'performs an upgrade')),
4060 (b'', b'backup', True, _(b'keep the old repository content around')),
4067 (b'', b'backup', True, _(b'keep the old repository content around')),
4061 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4068 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4062 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4069 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4063 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4070 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4064 ],
4071 ],
4065 )
4072 )
4066 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4073 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4067 """upgrade a repository to use different features
4074 """upgrade a repository to use different features
4068
4075
4069 If no arguments are specified, the repository is evaluated for upgrade
4076 If no arguments are specified, the repository is evaluated for upgrade
4070 and a list of problems and potential optimizations is printed.
4077 and a list of problems and potential optimizations is printed.
4071
4078
4072 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4079 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4073 can be influenced via additional arguments. More details will be provided
4080 can be influenced via additional arguments. More details will be provided
4074 by the command output when run without ``--run``.
4081 by the command output when run without ``--run``.
4075
4082
4076 During the upgrade, the repository will be locked and no writes will be
4083 During the upgrade, the repository will be locked and no writes will be
4077 allowed.
4084 allowed.
4078
4085
4079 At the end of the upgrade, the repository may not be readable while new
4086 At the end of the upgrade, the repository may not be readable while new
4080 repository data is swapped in. This window will be as long as it takes to
4087 repository data is swapped in. This window will be as long as it takes to
4081 rename some directories inside the ``.hg`` directory. On most machines, this
4088 rename some directories inside the ``.hg`` directory. On most machines, this
4082 should complete almost instantaneously and the chances of a consumer being
4089 should complete almost instantaneously and the chances of a consumer being
4083 unable to access the repository should be low.
4090 unable to access the repository should be low.
4084
4091
4085 By default, all revlogs will be upgraded. You can restrict this using flags
4092 By default, all revlogs will be upgraded. You can restrict this using flags
4086 such as `--manifest`:
4093 such as `--manifest`:
4087
4094
4088 * `--manifest`: only optimize the manifest
4095 * `--manifest`: only optimize the manifest
4089 * `--no-manifest`: optimize all revlog but the manifest
4096 * `--no-manifest`: optimize all revlog but the manifest
4090 * `--changelog`: optimize the changelog only
4097 * `--changelog`: optimize the changelog only
4091 * `--no-changelog --no-manifest`: optimize filelogs only
4098 * `--no-changelog --no-manifest`: optimize filelogs only
4092 * `--filelogs`: optimize the filelogs only
4099 * `--filelogs`: optimize the filelogs only
4093 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4100 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4094 """
4101 """
4095 return upgrade.upgraderepo(
4102 return upgrade.upgraderepo(
4096 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4103 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4097 )
4104 )
4098
4105
4099
4106
4100 @command(
4107 @command(
4101 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4108 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4102 )
4109 )
4103 def debugwalk(ui, repo, *pats, **opts):
4110 def debugwalk(ui, repo, *pats, **opts):
4104 """show how files match on given patterns"""
4111 """show how files match on given patterns"""
4105 opts = pycompat.byteskwargs(opts)
4112 opts = pycompat.byteskwargs(opts)
4106 m = scmutil.match(repo[None], pats, opts)
4113 m = scmutil.match(repo[None], pats, opts)
4107 if ui.verbose:
4114 if ui.verbose:
4108 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4115 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4109 items = list(repo[None].walk(m))
4116 items = list(repo[None].walk(m))
4110 if not items:
4117 if not items:
4111 return
4118 return
4112 f = lambda fn: fn
4119 f = lambda fn: fn
4113 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4120 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4114 f = lambda fn: util.normpath(fn)
4121 f = lambda fn: util.normpath(fn)
4115 fmt = b'f %%-%ds %%-%ds %%s' % (
4122 fmt = b'f %%-%ds %%-%ds %%s' % (
4116 max([len(abs) for abs in items]),
4123 max([len(abs) for abs in items]),
4117 max([len(repo.pathto(abs)) for abs in items]),
4124 max([len(repo.pathto(abs)) for abs in items]),
4118 )
4125 )
4119 for abs in items:
4126 for abs in items:
4120 line = fmt % (
4127 line = fmt % (
4121 abs,
4128 abs,
4122 f(repo.pathto(abs)),
4129 f(repo.pathto(abs)),
4123 m.exact(abs) and b'exact' or b'',
4130 m.exact(abs) and b'exact' or b'',
4124 )
4131 )
4125 ui.write(b"%s\n" % line.rstrip())
4132 ui.write(b"%s\n" % line.rstrip())
4126
4133
4127
4134
4128 @command(b'debugwhyunstable', [], _(b'REV'))
4135 @command(b'debugwhyunstable', [], _(b'REV'))
4129 def debugwhyunstable(ui, repo, rev):
4136 def debugwhyunstable(ui, repo, rev):
4130 """explain instabilities of a changeset"""
4137 """explain instabilities of a changeset"""
4131 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4138 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4132 dnodes = b''
4139 dnodes = b''
4133 if entry.get(b'divergentnodes'):
4140 if entry.get(b'divergentnodes'):
4134 dnodes = (
4141 dnodes = (
4135 b' '.join(
4142 b' '.join(
4136 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4143 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4137 for ctx in entry[b'divergentnodes']
4144 for ctx in entry[b'divergentnodes']
4138 )
4145 )
4139 + b' '
4146 + b' '
4140 )
4147 )
4141 ui.write(
4148 ui.write(
4142 b'%s: %s%s %s\n'
4149 b'%s: %s%s %s\n'
4143 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4150 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4144 )
4151 )
4145
4152
4146
4153
4147 @command(
4154 @command(
4148 b'debugwireargs',
4155 b'debugwireargs',
4149 [
4156 [
4150 (b'', b'three', b'', b'three'),
4157 (b'', b'three', b'', b'three'),
4151 (b'', b'four', b'', b'four'),
4158 (b'', b'four', b'', b'four'),
4152 (b'', b'five', b'', b'five'),
4159 (b'', b'five', b'', b'five'),
4153 ]
4160 ]
4154 + cmdutil.remoteopts,
4161 + cmdutil.remoteopts,
4155 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4162 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4156 norepo=True,
4163 norepo=True,
4157 )
4164 )
4158 def debugwireargs(ui, repopath, *vals, **opts):
4165 def debugwireargs(ui, repopath, *vals, **opts):
4159 opts = pycompat.byteskwargs(opts)
4166 opts = pycompat.byteskwargs(opts)
4160 repo = hg.peer(ui, opts, repopath)
4167 repo = hg.peer(ui, opts, repopath)
4161 try:
4168 try:
4162 for opt in cmdutil.remoteopts:
4169 for opt in cmdutil.remoteopts:
4163 del opts[opt[1]]
4170 del opts[opt[1]]
4164 args = {}
4171 args = {}
4165 for k, v in opts.items():
4172 for k, v in opts.items():
4166 if v:
4173 if v:
4167 args[k] = v
4174 args[k] = v
4168 args = pycompat.strkwargs(args)
4175 args = pycompat.strkwargs(args)
4169 # run twice to check that we don't mess up the stream for the next command
4176 # run twice to check that we don't mess up the stream for the next command
4170 res1 = repo.debugwireargs(*vals, **args)
4177 res1 = repo.debugwireargs(*vals, **args)
4171 res2 = repo.debugwireargs(*vals, **args)
4178 res2 = repo.debugwireargs(*vals, **args)
4172 ui.write(b"%s\n" % res1)
4179 ui.write(b"%s\n" % res1)
4173 if res1 != res2:
4180 if res1 != res2:
4174 ui.warn(b"%s\n" % res2)
4181 ui.warn(b"%s\n" % res2)
4175 finally:
4182 finally:
4176 repo.close()
4183 repo.close()
4177
4184
4178
4185
4179 def _parsewirelangblocks(fh):
4186 def _parsewirelangblocks(fh):
4180 activeaction = None
4187 activeaction = None
4181 blocklines = []
4188 blocklines = []
4182 lastindent = 0
4189 lastindent = 0
4183
4190
4184 for line in fh:
4191 for line in fh:
4185 line = line.rstrip()
4192 line = line.rstrip()
4186 if not line:
4193 if not line:
4187 continue
4194 continue
4188
4195
4189 if line.startswith(b'#'):
4196 if line.startswith(b'#'):
4190 continue
4197 continue
4191
4198
4192 if not line.startswith(b' '):
4199 if not line.startswith(b' '):
4193 # New block. Flush previous one.
4200 # New block. Flush previous one.
4194 if activeaction:
4201 if activeaction:
4195 yield activeaction, blocklines
4202 yield activeaction, blocklines
4196
4203
4197 activeaction = line
4204 activeaction = line
4198 blocklines = []
4205 blocklines = []
4199 lastindent = 0
4206 lastindent = 0
4200 continue
4207 continue
4201
4208
4202 # Else we start with an indent.
4209 # Else we start with an indent.
4203
4210
4204 if not activeaction:
4211 if not activeaction:
4205 raise error.Abort(_(b'indented line outside of block'))
4212 raise error.Abort(_(b'indented line outside of block'))
4206
4213
4207 indent = len(line) - len(line.lstrip())
4214 indent = len(line) - len(line.lstrip())
4208
4215
4209 # If this line is indented more than the last line, concatenate it.
4216 # If this line is indented more than the last line, concatenate it.
4210 if indent > lastindent and blocklines:
4217 if indent > lastindent and blocklines:
4211 blocklines[-1] += line.lstrip()
4218 blocklines[-1] += line.lstrip()
4212 else:
4219 else:
4213 blocklines.append(line)
4220 blocklines.append(line)
4214 lastindent = indent
4221 lastindent = indent
4215
4222
4216 # Flush last block.
4223 # Flush last block.
4217 if activeaction:
4224 if activeaction:
4218 yield activeaction, blocklines
4225 yield activeaction, blocklines
4219
4226
4220
4227
4221 @command(
4228 @command(
4222 b'debugwireproto',
4229 b'debugwireproto',
4223 [
4230 [
4224 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4231 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4225 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4232 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4226 (
4233 (
4227 b'',
4234 b'',
4228 b'noreadstderr',
4235 b'noreadstderr',
4229 False,
4236 False,
4230 _(b'do not read from stderr of the remote'),
4237 _(b'do not read from stderr of the remote'),
4231 ),
4238 ),
4232 (
4239 (
4233 b'',
4240 b'',
4234 b'nologhandshake',
4241 b'nologhandshake',
4235 False,
4242 False,
4236 _(b'do not log I/O related to the peer handshake'),
4243 _(b'do not log I/O related to the peer handshake'),
4237 ),
4244 ),
4238 ]
4245 ]
4239 + cmdutil.remoteopts,
4246 + cmdutil.remoteopts,
4240 _(b'[PATH]'),
4247 _(b'[PATH]'),
4241 optionalrepo=True,
4248 optionalrepo=True,
4242 )
4249 )
4243 def debugwireproto(ui, repo, path=None, **opts):
4250 def debugwireproto(ui, repo, path=None, **opts):
4244 """send wire protocol commands to a server
4251 """send wire protocol commands to a server
4245
4252
4246 This command can be used to issue wire protocol commands to remote
4253 This command can be used to issue wire protocol commands to remote
4247 peers and to debug the raw data being exchanged.
4254 peers and to debug the raw data being exchanged.
4248
4255
4249 ``--localssh`` will start an SSH server against the current repository
4256 ``--localssh`` will start an SSH server against the current repository
4250 and connect to that. By default, the connection will perform a handshake
4257 and connect to that. By default, the connection will perform a handshake
4251 and establish an appropriate peer instance.
4258 and establish an appropriate peer instance.
4252
4259
4253 ``--peer`` can be used to bypass the handshake protocol and construct a
4260 ``--peer`` can be used to bypass the handshake protocol and construct a
4254 peer instance using the specified class type. Valid values are ``raw``,
4261 peer instance using the specified class type. Valid values are ``raw``,
4255 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4262 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4256 don't support higher-level command actions.
4263 don't support higher-level command actions.
4257
4264
4258 ``--noreadstderr`` can be used to disable automatic reading from stderr
4265 ``--noreadstderr`` can be used to disable automatic reading from stderr
4259 of the peer (for SSH connections only). Disabling automatic reading of
4266 of the peer (for SSH connections only). Disabling automatic reading of
4260 stderr is useful for making output more deterministic.
4267 stderr is useful for making output more deterministic.
4261
4268
4262 Commands are issued via a mini language which is specified via stdin.
4269 Commands are issued via a mini language which is specified via stdin.
4263 The language consists of individual actions to perform. An action is
4270 The language consists of individual actions to perform. An action is
4264 defined by a block. A block is defined as a line with no leading
4271 defined by a block. A block is defined as a line with no leading
4265 space followed by 0 or more lines with leading space. Blocks are
4272 space followed by 0 or more lines with leading space. Blocks are
4266 effectively a high-level command with additional metadata.
4273 effectively a high-level command with additional metadata.
4267
4274
4268 Lines beginning with ``#`` are ignored.
4275 Lines beginning with ``#`` are ignored.
4269
4276
4270 The following sections denote available actions.
4277 The following sections denote available actions.
4271
4278
4272 raw
4279 raw
4273 ---
4280 ---
4274
4281
4275 Send raw data to the server.
4282 Send raw data to the server.
4276
4283
4277 The block payload contains the raw data to send as one atomic send
4284 The block payload contains the raw data to send as one atomic send
4278 operation. The data may not actually be delivered in a single system
4285 operation. The data may not actually be delivered in a single system
4279 call: it depends on the abilities of the transport being used.
4286 call: it depends on the abilities of the transport being used.
4280
4287
4281 Each line in the block is de-indented and concatenated. Then, that
4288 Each line in the block is de-indented and concatenated. Then, that
4282 value is evaluated as a Python b'' literal. This allows the use of
4289 value is evaluated as a Python b'' literal. This allows the use of
4283 backslash escaping, etc.
4290 backslash escaping, etc.
4284
4291
4285 raw+
4292 raw+
4286 ----
4293 ----
4287
4294
4288 Behaves like ``raw`` except flushes output afterwards.
4295 Behaves like ``raw`` except flushes output afterwards.
4289
4296
4290 command <X>
4297 command <X>
4291 -----------
4298 -----------
4292
4299
4293 Send a request to run a named command, whose name follows the ``command``
4300 Send a request to run a named command, whose name follows the ``command``
4294 string.
4301 string.
4295
4302
4296 Arguments to the command are defined as lines in this block. The format of
4303 Arguments to the command are defined as lines in this block. The format of
4297 each line is ``<key> <value>``. e.g.::
4304 each line is ``<key> <value>``. e.g.::
4298
4305
4299 command listkeys
4306 command listkeys
4300 namespace bookmarks
4307 namespace bookmarks
4301
4308
4302 If the value begins with ``eval:``, it will be interpreted as a Python
4309 If the value begins with ``eval:``, it will be interpreted as a Python
4303 literal expression. Otherwise values are interpreted as Python b'' literals.
4310 literal expression. Otherwise values are interpreted as Python b'' literals.
4304 This allows sending complex types and encoding special byte sequences via
4311 This allows sending complex types and encoding special byte sequences via
4305 backslash escaping.
4312 backslash escaping.
4306
4313
4307 The following arguments have special meaning:
4314 The following arguments have special meaning:
4308
4315
4309 ``PUSHFILE``
4316 ``PUSHFILE``
4310 When defined, the *push* mechanism of the peer will be used instead
4317 When defined, the *push* mechanism of the peer will be used instead
4311 of the static request-response mechanism and the content of the
4318 of the static request-response mechanism and the content of the
4312 file specified in the value of this argument will be sent as the
4319 file specified in the value of this argument will be sent as the
4313 command payload.
4320 command payload.
4314
4321
4315 This can be used to submit a local bundle file to the remote.
4322 This can be used to submit a local bundle file to the remote.
4316
4323
4317 batchbegin
4324 batchbegin
4318 ----------
4325 ----------
4319
4326
4320 Instruct the peer to begin a batched send.
4327 Instruct the peer to begin a batched send.
4321
4328
4322 All ``command`` blocks are queued for execution until the next
4329 All ``command`` blocks are queued for execution until the next
4323 ``batchsubmit`` block.
4330 ``batchsubmit`` block.
4324
4331
4325 batchsubmit
4332 batchsubmit
4326 -----------
4333 -----------
4327
4334
4328 Submit previously queued ``command`` blocks as a batch request.
4335 Submit previously queued ``command`` blocks as a batch request.
4329
4336
4330 This action MUST be paired with a ``batchbegin`` action.
4337 This action MUST be paired with a ``batchbegin`` action.
4331
4338
4332 httprequest <method> <path>
4339 httprequest <method> <path>
4333 ---------------------------
4340 ---------------------------
4334
4341
4335 (HTTP peer only)
4342 (HTTP peer only)
4336
4343
4337 Send an HTTP request to the peer.
4344 Send an HTTP request to the peer.
4338
4345
4339 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4346 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4340
4347
4341 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4348 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4342 headers to add to the request. e.g. ``Accept: foo``.
4349 headers to add to the request. e.g. ``Accept: foo``.
4343
4350
4344 The following arguments are special:
4351 The following arguments are special:
4345
4352
4346 ``BODYFILE``
4353 ``BODYFILE``
4347 The content of the file defined as the value to this argument will be
4354 The content of the file defined as the value to this argument will be
4348 transferred verbatim as the HTTP request body.
4355 transferred verbatim as the HTTP request body.
4349
4356
4350 ``frame <type> <flags> <payload>``
4357 ``frame <type> <flags> <payload>``
4351 Send a unified protocol frame as part of the request body.
4358 Send a unified protocol frame as part of the request body.
4352
4359
4353 All frames will be collected and sent as the body to the HTTP
4360 All frames will be collected and sent as the body to the HTTP
4354 request.
4361 request.
4355
4362
4356 close
4363 close
4357 -----
4364 -----
4358
4365
4359 Close the connection to the server.
4366 Close the connection to the server.
4360
4367
4361 flush
4368 flush
4362 -----
4369 -----
4363
4370
4364 Flush data written to the server.
4371 Flush data written to the server.
4365
4372
4366 readavailable
4373 readavailable
4367 -------------
4374 -------------
4368
4375
4369 Close the write end of the connection and read all available data from
4376 Close the write end of the connection and read all available data from
4370 the server.
4377 the server.
4371
4378
4372 If the connection to the server encompasses multiple pipes, we poll both
4379 If the connection to the server encompasses multiple pipes, we poll both
4373 pipes and read available data.
4380 pipes and read available data.
4374
4381
4375 readline
4382 readline
4376 --------
4383 --------
4377
4384
4378 Read a line of output from the server. If there are multiple output
4385 Read a line of output from the server. If there are multiple output
4379 pipes, reads only the main pipe.
4386 pipes, reads only the main pipe.
4380
4387
4381 ereadline
4388 ereadline
4382 ---------
4389 ---------
4383
4390
4384 Like ``readline``, but read from the stderr pipe, if available.
4391 Like ``readline``, but read from the stderr pipe, if available.
4385
4392
4386 read <X>
4393 read <X>
4387 --------
4394 --------
4388
4395
4389 ``read()`` N bytes from the server's main output pipe.
4396 ``read()`` N bytes from the server's main output pipe.
4390
4397
4391 eread <X>
4398 eread <X>
4392 ---------
4399 ---------
4393
4400
4394 ``read()`` N bytes from the server's stderr pipe, if available.
4401 ``read()`` N bytes from the server's stderr pipe, if available.
4395
4402
4396 Specifying Unified Frame-Based Protocol Frames
4403 Specifying Unified Frame-Based Protocol Frames
4397 ----------------------------------------------
4404 ----------------------------------------------
4398
4405
4399 It is possible to emit a *Unified Frame-Based Protocol* by using special
4406 It is possible to emit a *Unified Frame-Based Protocol* by using special
4400 syntax.
4407 syntax.
4401
4408
4402 A frame is composed as a type, flags, and payload. These can be parsed
4409 A frame is composed as a type, flags, and payload. These can be parsed
4403 from a string of the form:
4410 from a string of the form:
4404
4411
4405 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4412 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4406
4413
4407 ``request-id`` and ``stream-id`` are integers defining the request and
4414 ``request-id`` and ``stream-id`` are integers defining the request and
4408 stream identifiers.
4415 stream identifiers.
4409
4416
4410 ``type`` can be an integer value for the frame type or the string name
4417 ``type`` can be an integer value for the frame type or the string name
4411 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4418 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4412 ``command-name``.
4419 ``command-name``.
4413
4420
4414 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4421 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4415 components. Each component (and there can be just one) can be an integer
4422 components. Each component (and there can be just one) can be an integer
4416 or a flag name for stream flags or frame flags, respectively. Values are
4423 or a flag name for stream flags or frame flags, respectively. Values are
4417 resolved to integers and then bitwise OR'd together.
4424 resolved to integers and then bitwise OR'd together.
4418
4425
4419 ``payload`` represents the raw frame payload. If it begins with
4426 ``payload`` represents the raw frame payload. If it begins with
4420 ``cbor:``, the following string is evaluated as Python code and the
4427 ``cbor:``, the following string is evaluated as Python code and the
4421 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4428 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4422 as a Python byte string literal.
4429 as a Python byte string literal.
4423 """
4430 """
4424 opts = pycompat.byteskwargs(opts)
4431 opts = pycompat.byteskwargs(opts)
4425
4432
4426 if opts[b'localssh'] and not repo:
4433 if opts[b'localssh'] and not repo:
4427 raise error.Abort(_(b'--localssh requires a repository'))
4434 raise error.Abort(_(b'--localssh requires a repository'))
4428
4435
4429 if opts[b'peer'] and opts[b'peer'] not in (
4436 if opts[b'peer'] and opts[b'peer'] not in (
4430 b'raw',
4437 b'raw',
4431 b'ssh1',
4438 b'ssh1',
4432 ):
4439 ):
4433 raise error.Abort(
4440 raise error.Abort(
4434 _(b'invalid value for --peer'),
4441 _(b'invalid value for --peer'),
4435 hint=_(b'valid values are "raw" and "ssh1"'),
4442 hint=_(b'valid values are "raw" and "ssh1"'),
4436 )
4443 )
4437
4444
4438 if path and opts[b'localssh']:
4445 if path and opts[b'localssh']:
4439 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4440
4447
4441 if ui.interactive():
4448 if ui.interactive():
4442 ui.write(_(b'(waiting for commands on stdin)\n'))
4449 ui.write(_(b'(waiting for commands on stdin)\n'))
4443
4450
4444 blocks = list(_parsewirelangblocks(ui.fin))
4451 blocks = list(_parsewirelangblocks(ui.fin))
4445
4452
4446 proc = None
4453 proc = None
4447 stdin = None
4454 stdin = None
4448 stdout = None
4455 stdout = None
4449 stderr = None
4456 stderr = None
4450 opener = None
4457 opener = None
4451
4458
4452 if opts[b'localssh']:
4459 if opts[b'localssh']:
4453 # We start the SSH server in its own process so there is process
4460 # We start the SSH server in its own process so there is process
4454 # separation. This prevents a whole class of potential bugs around
4461 # separation. This prevents a whole class of potential bugs around
4455 # shared state from interfering with server operation.
4462 # shared state from interfering with server operation.
4456 args = procutil.hgcmd() + [
4463 args = procutil.hgcmd() + [
4457 b'-R',
4464 b'-R',
4458 repo.root,
4465 repo.root,
4459 b'debugserve',
4466 b'debugserve',
4460 b'--sshstdio',
4467 b'--sshstdio',
4461 ]
4468 ]
4462 proc = subprocess.Popen(
4469 proc = subprocess.Popen(
4463 pycompat.rapply(procutil.tonativestr, args),
4470 pycompat.rapply(procutil.tonativestr, args),
4464 stdin=subprocess.PIPE,
4471 stdin=subprocess.PIPE,
4465 stdout=subprocess.PIPE,
4472 stdout=subprocess.PIPE,
4466 stderr=subprocess.PIPE,
4473 stderr=subprocess.PIPE,
4467 bufsize=0,
4474 bufsize=0,
4468 )
4475 )
4469
4476
4470 stdin = proc.stdin
4477 stdin = proc.stdin
4471 stdout = proc.stdout
4478 stdout = proc.stdout
4472 stderr = proc.stderr
4479 stderr = proc.stderr
4473
4480
4474 # We turn the pipes into observers so we can log I/O.
4481 # We turn the pipes into observers so we can log I/O.
4475 if ui.verbose or opts[b'peer'] == b'raw':
4482 if ui.verbose or opts[b'peer'] == b'raw':
4476 stdin = util.makeloggingfileobject(
4483 stdin = util.makeloggingfileobject(
4477 ui, proc.stdin, b'i', logdata=True
4484 ui, proc.stdin, b'i', logdata=True
4478 )
4485 )
4479 stdout = util.makeloggingfileobject(
4486 stdout = util.makeloggingfileobject(
4480 ui, proc.stdout, b'o', logdata=True
4487 ui, proc.stdout, b'o', logdata=True
4481 )
4488 )
4482 stderr = util.makeloggingfileobject(
4489 stderr = util.makeloggingfileobject(
4483 ui, proc.stderr, b'e', logdata=True
4490 ui, proc.stderr, b'e', logdata=True
4484 )
4491 )
4485
4492
4486 # --localssh also implies the peer connection settings.
4493 # --localssh also implies the peer connection settings.
4487
4494
4488 url = b'ssh://localserver'
4495 url = b'ssh://localserver'
4489 autoreadstderr = not opts[b'noreadstderr']
4496 autoreadstderr = not opts[b'noreadstderr']
4490
4497
4491 if opts[b'peer'] == b'ssh1':
4498 if opts[b'peer'] == b'ssh1':
4492 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4493 peer = sshpeer.sshv1peer(
4500 peer = sshpeer.sshv1peer(
4494 ui,
4501 ui,
4495 url,
4502 url,
4496 proc,
4503 proc,
4497 stdin,
4504 stdin,
4498 stdout,
4505 stdout,
4499 stderr,
4506 stderr,
4500 None,
4507 None,
4501 autoreadstderr=autoreadstderr,
4508 autoreadstderr=autoreadstderr,
4502 )
4509 )
4503 elif opts[b'peer'] == b'raw':
4510 elif opts[b'peer'] == b'raw':
4504 ui.write(_(b'using raw connection to peer\n'))
4511 ui.write(_(b'using raw connection to peer\n'))
4505 peer = None
4512 peer = None
4506 else:
4513 else:
4507 ui.write(_(b'creating ssh peer from handshake results\n'))
4514 ui.write(_(b'creating ssh peer from handshake results\n'))
4508 peer = sshpeer.makepeer(
4515 peer = sshpeer.makepeer(
4509 ui,
4516 ui,
4510 url,
4517 url,
4511 proc,
4518 proc,
4512 stdin,
4519 stdin,
4513 stdout,
4520 stdout,
4514 stderr,
4521 stderr,
4515 autoreadstderr=autoreadstderr,
4522 autoreadstderr=autoreadstderr,
4516 )
4523 )
4517
4524
4518 elif path:
4525 elif path:
4519 # We bypass hg.peer() so we can proxy the sockets.
4526 # We bypass hg.peer() so we can proxy the sockets.
4520 # TODO consider not doing this because we skip
4527 # TODO consider not doing this because we skip
4521 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4528 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4522 u = urlutil.url(path)
4529 u = urlutil.url(path)
4523 if u.scheme != b'http':
4530 if u.scheme != b'http':
4524 raise error.Abort(_(b'only http:// paths are currently supported'))
4531 raise error.Abort(_(b'only http:// paths are currently supported'))
4525
4532
4526 url, authinfo = u.authinfo()
4533 url, authinfo = u.authinfo()
4527 openerargs = {
4534 openerargs = {
4528 'useragent': b'Mercurial debugwireproto',
4535 'useragent': b'Mercurial debugwireproto',
4529 }
4536 }
4530
4537
4531 # Turn pipes/sockets into observers so we can log I/O.
4538 # Turn pipes/sockets into observers so we can log I/O.
4532 if ui.verbose:
4539 if ui.verbose:
4533 openerargs.update(
4540 openerargs.update(
4534 {
4541 {
4535 'loggingfh': ui,
4542 'loggingfh': ui,
4536 'loggingname': b's',
4543 'loggingname': b's',
4537 'loggingopts': {
4544 'loggingopts': {
4538 'logdata': True,
4545 'logdata': True,
4539 'logdataapis': False,
4546 'logdataapis': False,
4540 },
4547 },
4541 }
4548 }
4542 )
4549 )
4543
4550
4544 if ui.debugflag:
4551 if ui.debugflag:
4545 openerargs['loggingopts']['logdataapis'] = True
4552 openerargs['loggingopts']['logdataapis'] = True
4546
4553
4547 # Don't send default headers when in raw mode. This allows us to
4554 # Don't send default headers when in raw mode. This allows us to
4548 # bypass most of the behavior of our URL handling code so we can
4555 # bypass most of the behavior of our URL handling code so we can
4549 # have near complete control over what's sent on the wire.
4556 # have near complete control over what's sent on the wire.
4550 if opts[b'peer'] == b'raw':
4557 if opts[b'peer'] == b'raw':
4551 openerargs['sendaccept'] = False
4558 openerargs['sendaccept'] = False
4552
4559
4553 opener = urlmod.opener(ui, authinfo, **openerargs)
4560 opener = urlmod.opener(ui, authinfo, **openerargs)
4554
4561
4555 if opts[b'peer'] == b'raw':
4562 if opts[b'peer'] == b'raw':
4556 ui.write(_(b'using raw connection to peer\n'))
4563 ui.write(_(b'using raw connection to peer\n'))
4557 peer = None
4564 peer = None
4558 elif opts[b'peer']:
4565 elif opts[b'peer']:
4559 raise error.Abort(
4566 raise error.Abort(
4560 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4567 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4561 )
4568 )
4562 else:
4569 else:
4563 peer_path = urlutil.try_path(ui, path)
4570 peer_path = urlutil.try_path(ui, path)
4564 peer = httppeer.makepeer(ui, peer_path, opener=opener)
4571 peer = httppeer.makepeer(ui, peer_path, opener=opener)
4565
4572
4566 # We /could/ populate stdin/stdout with sock.makefile()...
4573 # We /could/ populate stdin/stdout with sock.makefile()...
4567 else:
4574 else:
4568 raise error.Abort(_(b'unsupported connection configuration'))
4575 raise error.Abort(_(b'unsupported connection configuration'))
4569
4576
4570 batchedcommands = None
4577 batchedcommands = None
4571
4578
4572 # Now perform actions based on the parsed wire language instructions.
4579 # Now perform actions based on the parsed wire language instructions.
4573 for action, lines in blocks:
4580 for action, lines in blocks:
4574 if action in (b'raw', b'raw+'):
4581 if action in (b'raw', b'raw+'):
4575 if not stdin:
4582 if not stdin:
4576 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4583 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4577
4584
4578 # Concatenate the data together.
4585 # Concatenate the data together.
4579 data = b''.join(l.lstrip() for l in lines)
4586 data = b''.join(l.lstrip() for l in lines)
4580 data = stringutil.unescapestr(data)
4587 data = stringutil.unescapestr(data)
4581 stdin.write(data)
4588 stdin.write(data)
4582
4589
4583 if action == b'raw+':
4590 if action == b'raw+':
4584 stdin.flush()
4591 stdin.flush()
4585 elif action == b'flush':
4592 elif action == b'flush':
4586 if not stdin:
4593 if not stdin:
4587 raise error.Abort(_(b'cannot call flush on this peer'))
4594 raise error.Abort(_(b'cannot call flush on this peer'))
4588 stdin.flush()
4595 stdin.flush()
4589 elif action.startswith(b'command'):
4596 elif action.startswith(b'command'):
4590 if not peer:
4597 if not peer:
4591 raise error.Abort(
4598 raise error.Abort(
4592 _(
4599 _(
4593 b'cannot send commands unless peer instance '
4600 b'cannot send commands unless peer instance '
4594 b'is available'
4601 b'is available'
4595 )
4602 )
4596 )
4603 )
4597
4604
4598 command = action.split(b' ', 1)[1]
4605 command = action.split(b' ', 1)[1]
4599
4606
4600 args = {}
4607 args = {}
4601 for line in lines:
4608 for line in lines:
4602 # We need to allow empty values.
4609 # We need to allow empty values.
4603 fields = line.lstrip().split(b' ', 1)
4610 fields = line.lstrip().split(b' ', 1)
4604 if len(fields) == 1:
4611 if len(fields) == 1:
4605 key = fields[0]
4612 key = fields[0]
4606 value = b''
4613 value = b''
4607 else:
4614 else:
4608 key, value = fields
4615 key, value = fields
4609
4616
4610 if value.startswith(b'eval:'):
4617 if value.startswith(b'eval:'):
4611 value = stringutil.evalpythonliteral(value[5:])
4618 value = stringutil.evalpythonliteral(value[5:])
4612 else:
4619 else:
4613 value = stringutil.unescapestr(value)
4620 value = stringutil.unescapestr(value)
4614
4621
4615 args[key] = value
4622 args[key] = value
4616
4623
4617 if batchedcommands is not None:
4624 if batchedcommands is not None:
4618 batchedcommands.append((command, args))
4625 batchedcommands.append((command, args))
4619 continue
4626 continue
4620
4627
4621 ui.status(_(b'sending %s command\n') % command)
4628 ui.status(_(b'sending %s command\n') % command)
4622
4629
4623 if b'PUSHFILE' in args:
4630 if b'PUSHFILE' in args:
4624 with open(args[b'PUSHFILE'], 'rb') as fh:
4631 with open(args[b'PUSHFILE'], 'rb') as fh:
4625 del args[b'PUSHFILE']
4632 del args[b'PUSHFILE']
4626 res, output = peer._callpush(
4633 res, output = peer._callpush(
4627 command, fh, **pycompat.strkwargs(args)
4634 command, fh, **pycompat.strkwargs(args)
4628 )
4635 )
4629 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4636 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4630 ui.status(
4637 ui.status(
4631 _(b'remote output: %s\n') % stringutil.escapestr(output)
4638 _(b'remote output: %s\n') % stringutil.escapestr(output)
4632 )
4639 )
4633 else:
4640 else:
4634 with peer.commandexecutor() as e:
4641 with peer.commandexecutor() as e:
4635 res = e.callcommand(command, args).result()
4642 res = e.callcommand(command, args).result()
4636
4643
4637 ui.status(
4644 ui.status(
4638 _(b'response: %s\n')
4645 _(b'response: %s\n')
4639 % stringutil.pprint(res, bprefix=True, indent=2)
4646 % stringutil.pprint(res, bprefix=True, indent=2)
4640 )
4647 )
4641
4648
4642 elif action == b'batchbegin':
4649 elif action == b'batchbegin':
4643 if batchedcommands is not None:
4650 if batchedcommands is not None:
4644 raise error.Abort(_(b'nested batchbegin not allowed'))
4651 raise error.Abort(_(b'nested batchbegin not allowed'))
4645
4652
4646 batchedcommands = []
4653 batchedcommands = []
4647 elif action == b'batchsubmit':
4654 elif action == b'batchsubmit':
4648 # There is a batching API we could go through. But it would be
4655 # There is a batching API we could go through. But it would be
4649 # difficult to normalize requests into function calls. It is easier
4656 # difficult to normalize requests into function calls. It is easier
4650 # to bypass this layer and normalize to commands + args.
4657 # to bypass this layer and normalize to commands + args.
4651 ui.status(
4658 ui.status(
4652 _(b'sending batch with %d sub-commands\n')
4659 _(b'sending batch with %d sub-commands\n')
4653 % len(batchedcommands)
4660 % len(batchedcommands)
4654 )
4661 )
4655 assert peer is not None
4662 assert peer is not None
4656 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4663 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4657 ui.status(
4664 ui.status(
4658 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4665 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4659 )
4666 )
4660
4667
4661 batchedcommands = None
4668 batchedcommands = None
4662
4669
4663 elif action.startswith(b'httprequest '):
4670 elif action.startswith(b'httprequest '):
4664 if not opener:
4671 if not opener:
4665 raise error.Abort(
4672 raise error.Abort(
4666 _(b'cannot use httprequest without an HTTP peer')
4673 _(b'cannot use httprequest without an HTTP peer')
4667 )
4674 )
4668
4675
4669 request = action.split(b' ', 2)
4676 request = action.split(b' ', 2)
4670 if len(request) != 3:
4677 if len(request) != 3:
4671 raise error.Abort(
4678 raise error.Abort(
4672 _(
4679 _(
4673 b'invalid httprequest: expected format is '
4680 b'invalid httprequest: expected format is '
4674 b'"httprequest <method> <path>'
4681 b'"httprequest <method> <path>'
4675 )
4682 )
4676 )
4683 )
4677
4684
4678 method, httppath = request[1:]
4685 method, httppath = request[1:]
4679 headers = {}
4686 headers = {}
4680 body = None
4687 body = None
4681 frames = []
4688 frames = []
4682 for line in lines:
4689 for line in lines:
4683 line = line.lstrip()
4690 line = line.lstrip()
4684 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4691 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4685 if m:
4692 if m:
4686 # Headers need to use native strings.
4693 # Headers need to use native strings.
4687 key = pycompat.strurl(m.group(1))
4694 key = pycompat.strurl(m.group(1))
4688 value = pycompat.strurl(m.group(2))
4695 value = pycompat.strurl(m.group(2))
4689 headers[key] = value
4696 headers[key] = value
4690 continue
4697 continue
4691
4698
4692 if line.startswith(b'BODYFILE '):
4699 if line.startswith(b'BODYFILE '):
4693 with open(line.split(b' ', 1), b'rb') as fh:
4700 with open(line.split(b' ', 1), b'rb') as fh:
4694 body = fh.read()
4701 body = fh.read()
4695 elif line.startswith(b'frame '):
4702 elif line.startswith(b'frame '):
4696 frame = wireprotoframing.makeframefromhumanstring(
4703 frame = wireprotoframing.makeframefromhumanstring(
4697 line[len(b'frame ') :]
4704 line[len(b'frame ') :]
4698 )
4705 )
4699
4706
4700 frames.append(frame)
4707 frames.append(frame)
4701 else:
4708 else:
4702 raise error.Abort(
4709 raise error.Abort(
4703 _(b'unknown argument to httprequest: %s') % line
4710 _(b'unknown argument to httprequest: %s') % line
4704 )
4711 )
4705
4712
4706 url = path + httppath
4713 url = path + httppath
4707
4714
4708 if frames:
4715 if frames:
4709 body = b''.join(bytes(f) for f in frames)
4716 body = b''.join(bytes(f) for f in frames)
4710
4717
4711 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4718 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4712
4719
4713 # urllib.Request insists on using has_data() as a proxy for
4720 # urllib.Request insists on using has_data() as a proxy for
4714 # determining the request method. Override that to use our
4721 # determining the request method. Override that to use our
4715 # explicitly requested method.
4722 # explicitly requested method.
4716 req.get_method = lambda: pycompat.sysstr(method)
4723 req.get_method = lambda: pycompat.sysstr(method)
4717
4724
4718 try:
4725 try:
4719 res = opener.open(req)
4726 res = opener.open(req)
4720 body = res.read()
4727 body = res.read()
4721 except util.urlerr.urlerror as e:
4728 except util.urlerr.urlerror as e:
4722 # read() method must be called, but only exists in Python 2
4729 # read() method must be called, but only exists in Python 2
4723 getattr(e, 'read', lambda: None)()
4730 getattr(e, 'read', lambda: None)()
4724 continue
4731 continue
4725
4732
4726 ct = res.headers.get('Content-Type')
4733 ct = res.headers.get('Content-Type')
4727 if ct == 'application/mercurial-cbor':
4734 if ct == 'application/mercurial-cbor':
4728 ui.write(
4735 ui.write(
4729 _(b'cbor> %s\n')
4736 _(b'cbor> %s\n')
4730 % stringutil.pprint(
4737 % stringutil.pprint(
4731 cborutil.decodeall(body), bprefix=True, indent=2
4738 cborutil.decodeall(body), bprefix=True, indent=2
4732 )
4739 )
4733 )
4740 )
4734
4741
4735 elif action == b'close':
4742 elif action == b'close':
4736 assert peer is not None
4743 assert peer is not None
4737 peer.close()
4744 peer.close()
4738 elif action == b'readavailable':
4745 elif action == b'readavailable':
4739 if not stdout or not stderr:
4746 if not stdout or not stderr:
4740 raise error.Abort(
4747 raise error.Abort(
4741 _(b'readavailable not available on this peer')
4748 _(b'readavailable not available on this peer')
4742 )
4749 )
4743
4750
4744 stdin.close()
4751 stdin.close()
4745 stdout.read()
4752 stdout.read()
4746 stderr.read()
4753 stderr.read()
4747
4754
4748 elif action == b'readline':
4755 elif action == b'readline':
4749 if not stdout:
4756 if not stdout:
4750 raise error.Abort(_(b'readline not available on this peer'))
4757 raise error.Abort(_(b'readline not available on this peer'))
4751 stdout.readline()
4758 stdout.readline()
4752 elif action == b'ereadline':
4759 elif action == b'ereadline':
4753 if not stderr:
4760 if not stderr:
4754 raise error.Abort(_(b'ereadline not available on this peer'))
4761 raise error.Abort(_(b'ereadline not available on this peer'))
4755 stderr.readline()
4762 stderr.readline()
4756 elif action.startswith(b'read '):
4763 elif action.startswith(b'read '):
4757 count = int(action.split(b' ', 1)[1])
4764 count = int(action.split(b' ', 1)[1])
4758 if not stdout:
4765 if not stdout:
4759 raise error.Abort(_(b'read not available on this peer'))
4766 raise error.Abort(_(b'read not available on this peer'))
4760 stdout.read(count)
4767 stdout.read(count)
4761 elif action.startswith(b'eread '):
4768 elif action.startswith(b'eread '):
4762 count = int(action.split(b' ', 1)[1])
4769 count = int(action.split(b' ', 1)[1])
4763 if not stderr:
4770 if not stderr:
4764 raise error.Abort(_(b'eread not available on this peer'))
4771 raise error.Abort(_(b'eread not available on this peer'))
4765 stderr.read(count)
4772 stderr.read(count)
4766 else:
4773 else:
4767 raise error.Abort(_(b'unknown action: %s') % action)
4774 raise error.Abort(_(b'unknown action: %s') % action)
4768
4775
4769 if batchedcommands is not None:
4776 if batchedcommands is not None:
4770 raise error.Abort(_(b'unclosed "batchbegin" request'))
4777 raise error.Abort(_(b'unclosed "batchbegin" request'))
4771
4778
4772 if peer:
4779 if peer:
4773 peer.close()
4780 peer.close()
4774
4781
4775 if proc:
4782 if proc:
4776 proc.kill()
4783 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now