##// END OF EJS Templates
debugrevlog: migrate `opts` to native kwargs
Matt Harbison -
r51857:8f25df13 default
parent child Browse files
Show More
@@ -1,4842 +1,4843 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 open,
36 open,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 bundlerepo,
40 bundlerepo,
41 changegroup,
41 changegroup,
42 cmdutil,
42 cmdutil,
43 color,
43 color,
44 context,
44 context,
45 copies,
45 copies,
46 dagparser,
46 dagparser,
47 dirstateutils,
47 dirstateutils,
48 encoding,
48 encoding,
49 error,
49 error,
50 exchange,
50 exchange,
51 extensions,
51 extensions,
52 filelog,
52 filelog,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 manifest,
61 manifest,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 verify,
91 verify,
92 vfs as vfsmod,
92 vfs as vfsmod,
93 wireprotoframing,
93 wireprotoframing,
94 wireprotoserver,
94 wireprotoserver,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .stabletailgraph import stabletailsort
97 from .stabletailgraph import stabletailsort
98 from .utils import (
98 from .utils import (
99 cborutil,
99 cborutil,
100 compression,
100 compression,
101 dateutil,
101 dateutil,
102 procutil,
102 procutil,
103 stringutil,
103 stringutil,
104 urlutil,
104 urlutil,
105 )
105 )
106
106
107 from .revlogutils import (
107 from .revlogutils import (
108 constants as revlog_constants,
108 constants as revlog_constants,
109 debug as revlog_debug,
109 debug as revlog_debug,
110 deltas as deltautil,
110 deltas as deltautil,
111 nodemap,
111 nodemap,
112 rewrite,
112 rewrite,
113 sidedata,
113 sidedata,
114 )
114 )
115
115
116 release = lockmod.release
116 release = lockmod.release
117
117
118 table = {}
118 table = {}
119 table.update(strip.command._table)
119 table.update(strip.command._table)
120 command = registrar.command(table)
120 command = registrar.command(table)
121
121
122
122
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
142
142
143
143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Base85-armored EICAR test file; see
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159
159
160
160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle file and replay its contents onto the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
167
167
168
168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    # Refuse to run on a non-empty repo unless --from-existing was given.
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass: actually create the commits under the usual locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node
        atbranch = b'default'
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' element: create one commit with parents ps
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" accumulates per-rev lines; merges are resolved with
                    # the simplemerge 3-way machinery against the ancestor.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's slot in the shared file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per rev; merges also carry over the
                    # "nf*" files from the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # map DAG backrefs to previously committed node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' element: record a local tag for node id
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' element: switch the branch used for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352
352
353
353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Dump the contents of changegroup `gen`. With all=True every delta's
    # metadata is printed per section; otherwise only changelog node hashes.
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section (changelog, manifest, or a filelog) by
            # draining the deltas currently pending on the unbundler.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections repeat until filelogheader() returns an empty dict
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        # terse mode only makes sense for bare changegroups, not bundle2
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393
393
394
394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= prefix, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= prefix, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
416
416
417
417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426
426
427
427
def _quasirepr(thing):
    """Return a deterministic bytes repr; dict-likes are key-sorted."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
434
434
435
435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering if any types were requested
        if wanted and part.type not in wanted:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # part types are mutually exclusive, so dispatch with elif
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458
458
459
459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not unpack the payload
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482
482
483
483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
    try:
        # fetch before printing the header so a failure stays silent
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
502
502
503
503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    files = None
    if opts['compute']:
        # recompute from the changectx rather than trusting storage
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    # first matching category wins; fall back to generic "touched"
    categories = (
        (files.added, b"added"),
        (files.removed, b"removed"),
        (files.merged, b"merged"),
        (files.salvaged, b"salvaged"),
    )
    for f in sorted(files.touched):
        for members, label in categories:
            if f in members:
                action = label
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        template = b"%-8s %2s: %s, %s;\n"
        ui.write(template % (action, copy_parent, f, copy_source))
553
553
554
554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # any reported error means the dirstate disagrees with the parent manifest
    if verify.verifier(repo)._verify_dirstate():
        errstr = _(b"dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
562
562
563
563
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
576
576
577
577
def _debugdisplaycolor(ui):
    """print every color/effect label known to a scratch copy of the ui"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def _sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=_sortkey):
        ui.write(b'%s\n' % colorname, label=label)
594
594
595
595
def _debugdisplaystyle(ui):
    """print each configured style label with its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect column lines up
    column = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
609
609
610
610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
636
636
637
637
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A revlog index file was given: read it directly from the current
        # working directory, bypassing the repository.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit one 'n'(ode) event per revision with its non-null
            # parents, plus an 'l'(abel) event "rN" for every revision
            # explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged changelog revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Walk the changelog; `b` tracks the branch most recently
            # announced via an 'a'(nnotation) event so we only emit one
            # when the branch actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    # extra[b'branch'] is field 5 of the changelog entry.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagtextlines consumes the event stream and renders the concise
    # textual DAG description, wrapped at 70 columns.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
707
707
708
708
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the storage is implied, so the positional FILE
    # argument is actually the revision and no separate REV is allowed.
    storage_selected = (
        opts.get('changelog') or opts.get('manifest') or opts.get('dir')
    )
    if storage_selected:
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    storage = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727
727
728
728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the larger set of human-friendly date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # `parsed` is the (timestamp, tz-offset) pair used internally.
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747
747
748
748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
        - base: a full snapshot
        - snap: an intermediate snapshot
        - p1: a delta against the first parent
        - p2: a delta against the second parent
        - skip1: a delta against the same base as p1
          (when p1 has an empty delta)
        - skip2: a delta against the same base as p2
          (when p2 has an empty delta)
        - prev: a delta against the previous revision
        - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    r = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # Memoizes the compressed size of each revision's full delta chain so
    # overlapping chains are not re-summed from scratch on every revision.
    chain_size_cache = {}

    def revinfo(rev):
        # Collect per-revision statistics straight from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                # Stop on self-reference, null, or out-of-range bases
                # (the latter guards against corrupted revlogs).
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta; order matters: direct-parent matches win over
        # the skip1/skip2 cases, which win over snapshot/prev/other.
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a revision is either a full snapshot or
            # a delta against the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        # Sum compressed sizes along the chain, reusing any cached suffix.
        chain_size = 0
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))

    # NOTE(review): the column-header and padding spacing below appears
    # whitespace-collapsed in this copy of the file — verify the exact
    # alignment widths against the upstream source before relying on them.
    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chain identifiers are numbered in order of first appearance of a base.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be fetched from disk and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
997
997
998
998
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # One positional argument means "REV only"; two mean "FILE REV".
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    rl = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = rl.parentrevs(rev)

    # Lazily resolve the delta base implied by each --source keyword.
    resolvers = {
        b'full': lambda: nullrev,
        b'storage': lambda: rl.deltaparent(rev),
        b'p1': lambda: p1r,
        b'p2': lambda: p2r,
        b'prev': lambda: rev - 1,
    }
    if source not in resolvers:
        raise error.InputError(b"invalid --source value: %s" % source)
    base_rev = resolvers[source]()

    revlog_debug.debug_delta_find(ui, rl, rev, base_rev=base_rev)
1057
1057
1058
1058
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (metadata file) and return
        # without listing any entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated; when explicitly given it overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is requested.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # NOTE(review): the trailing padding of the 'unset'/'set'
            # literals appears whitespace-collapsed in this copy — confirm
            # the column alignment against the upstream source.
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # 0o20000 marks a symlink in the stored mode bits.
            mode = b'lnk'
        else:
            # Otherwise render the permission bits, honoring the umask.
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1146
1146
1147
1147
1148 @command(
1148 @command(
1149 b'debugdirstateignorepatternshash',
1149 b'debugdirstateignorepatternshash',
1150 [],
1150 [],
1151 _(b''),
1151 _(b''),
1152 )
1152 )
1153 def debugdirstateignorepatternshash(ui, repo, **opts):
1153 def debugdirstateignorepatternshash(ui, repo, **opts):
1154 """show the hash of ignore patterns stored in dirstate if v2,
1154 """show the hash of ignore patterns stored in dirstate if v2,
1155 or nothing for dirstate-v2
1155 or nothing for dirstate-v2
1156 """
1156 """
1157 if repo.dirstate._use_dirstate_v2:
1157 if repo.dirstate._use_dirstate_v2:
1158 docket = repo.dirstate._map.docket
1158 docket = repo.dirstate._map.docket
1159 hash_len = 20 # 160 bits for SHA-1
1159 hash_len = 20 # 160 bits for SHA-1
1160 hash_bytes = docket.tree_metadata[-hash_len:]
1160 hash_bytes = docket.tree_metadata[-hash_len:]
1161 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1161 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1162
1162
1163
1163
1164 @command(
1164 @command(
1165 b'debugdiscovery',
1165 b'debugdiscovery',
1166 [
1166 [
1167 (b'', b'old', None, _(b'use old-style discovery')),
1167 (b'', b'old', None, _(b'use old-style discovery')),
1168 (
1168 (
1169 b'',
1169 b'',
1170 b'nonheads',
1170 b'nonheads',
1171 None,
1171 None,
1172 _(b'use old-style discovery with non-heads included'),
1172 _(b'use old-style discovery with non-heads included'),
1173 ),
1173 ),
1174 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1174 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1175 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1175 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1176 (
1176 (
1177 b'',
1177 b'',
1178 b'local-as-revs',
1178 b'local-as-revs',
1179 b"",
1179 b"",
1180 b'treat local has having these revisions only',
1180 b'treat local has having these revisions only',
1181 ),
1181 ),
1182 (
1182 (
1183 b'',
1183 b'',
1184 b'remote-as-revs',
1184 b'remote-as-revs',
1185 b"",
1185 b"",
1186 b'use local as remote, with only these revisions',
1186 b'use local as remote, with only these revisions',
1187 ),
1187 ),
1188 ]
1188 ]
1189 + cmdutil.remoteopts
1189 + cmdutil.remoteopts
1190 + cmdutil.formatteropts,
1190 + cmdutil.formatteropts,
1191 _(b'[--rev REV] [OTHER]'),
1191 _(b'[--rev REV] [OTHER]'),
1192 )
1192 )
1193 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1193 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1194 """runs the changeset discovery protocol in isolation
1194 """runs the changeset discovery protocol in isolation
1195
1195
1196 The local peer can be "replaced" by a subset of the local repository by
1196 The local peer can be "replaced" by a subset of the local repository by
1197 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1197 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1198 can be "replaced" by a subset of the local repository using the
1198 can be "replaced" by a subset of the local repository using the
1199 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1199 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1200 discovery situations.
1200 discovery situations.
1201
1201
1202 The following developer oriented config are relevant for people playing with this command:
1202 The following developer oriented config are relevant for people playing with this command:
1203
1203
1204 * devel.discovery.exchange-heads=True
1204 * devel.discovery.exchange-heads=True
1205
1205
1206 If False, the discovery will not start with
1206 If False, the discovery will not start with
1207 remote head fetching and local head querying.
1207 remote head fetching and local head querying.
1208
1208
1209 * devel.discovery.grow-sample=True
1209 * devel.discovery.grow-sample=True
1210
1210
1211 If False, the sample size used in set discovery will not be increased
1211 If False, the sample size used in set discovery will not be increased
1212 through the process
1212 through the process
1213
1213
1214 * devel.discovery.grow-sample.dynamic=True
1214 * devel.discovery.grow-sample.dynamic=True
1215
1215
1216 When discovery.grow-sample.dynamic is True, the default, the sample size is
1216 When discovery.grow-sample.dynamic is True, the default, the sample size is
1217 adapted to the shape of the undecided set (it is set to the max of:
1217 adapted to the shape of the undecided set (it is set to the max of:
1218 <target-size>, len(roots(undecided)), len(heads(undecided)
1218 <target-size>, len(roots(undecided)), len(heads(undecided)
1219
1219
1220 * devel.discovery.grow-sample.rate=1.05
1220 * devel.discovery.grow-sample.rate=1.05
1221
1221
1222 the rate at which the sample grow
1222 the rate at which the sample grow
1223
1223
1224 * devel.discovery.randomize=True
1224 * devel.discovery.randomize=True
1225
1225
1226 If andom sampling during discovery are deterministic. It is meant for
1226 If andom sampling during discovery are deterministic. It is meant for
1227 integration tests.
1227 integration tests.
1228
1228
1229 * devel.discovery.sample-size=200
1229 * devel.discovery.sample-size=200
1230
1230
1231 Control the initial size of the discovery sample
1231 Control the initial size of the discovery sample
1232
1232
1233 * devel.discovery.sample-size.initial=100
1233 * devel.discovery.sample-size.initial=100
1234
1234
1235 Control the initial size of the discovery for initial change
1235 Control the initial size of the discovery for initial change
1236 """
1236 """
1237 unfi = repo.unfiltered()
1237 unfi = repo.unfiltered()
1238
1238
1239 # setup potential extra filtering
1239 # setup potential extra filtering
1240 local_revs = opts["local_as_revs"]
1240 local_revs = opts["local_as_revs"]
1241 remote_revs = opts["remote_as_revs"]
1241 remote_revs = opts["remote_as_revs"]
1242
1242
1243 # make sure tests are repeatable
1243 # make sure tests are repeatable
1244 random.seed(int(opts['seed']))
1244 random.seed(int(opts['seed']))
1245
1245
1246 if not remote_revs:
1246 if not remote_revs:
1247 path = urlutil.get_unique_pull_path_obj(
1247 path = urlutil.get_unique_pull_path_obj(
1248 b'debugdiscovery', ui, remoteurl
1248 b'debugdiscovery', ui, remoteurl
1249 )
1249 )
1250 branches = (path.branch, [])
1250 branches = (path.branch, [])
1251 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1251 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1252 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1252 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1253 else:
1253 else:
1254 branches = (None, [])
1254 branches = (None, [])
1255 remote_filtered_revs = logcmdutil.revrange(
1255 remote_filtered_revs = logcmdutil.revrange(
1256 unfi, [b"not (::(%s))" % remote_revs]
1256 unfi, [b"not (::(%s))" % remote_revs]
1257 )
1257 )
1258 remote_filtered_revs = frozenset(remote_filtered_revs)
1258 remote_filtered_revs = frozenset(remote_filtered_revs)
1259
1259
1260 def remote_func(x):
1260 def remote_func(x):
1261 return remote_filtered_revs
1261 return remote_filtered_revs
1262
1262
1263 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1263 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1264
1264
1265 remote = repo.peer()
1265 remote = repo.peer()
1266 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1266 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1267
1267
1268 if local_revs:
1268 if local_revs:
1269 local_filtered_revs = logcmdutil.revrange(
1269 local_filtered_revs = logcmdutil.revrange(
1270 unfi, [b"not (::(%s))" % local_revs]
1270 unfi, [b"not (::(%s))" % local_revs]
1271 )
1271 )
1272 local_filtered_revs = frozenset(local_filtered_revs)
1272 local_filtered_revs = frozenset(local_filtered_revs)
1273
1273
1274 def local_func(x):
1274 def local_func(x):
1275 return local_filtered_revs
1275 return local_filtered_revs
1276
1276
1277 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1277 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1278 repo = repo.filtered(b'debug-discovery-local-filter')
1278 repo = repo.filtered(b'debug-discovery-local-filter')
1279
1279
1280 data = {}
1280 data = {}
1281 if opts.get('old'):
1281 if opts.get('old'):
1282
1282
1283 def doit(pushedrevs, remoteheads, remote=remote):
1283 def doit(pushedrevs, remoteheads, remote=remote):
1284 if not hasattr(remote, 'branches'):
1284 if not hasattr(remote, 'branches'):
1285 # enable in-client legacy support
1285 # enable in-client legacy support
1286 remote = localrepo.locallegacypeer(remote.local())
1286 remote = localrepo.locallegacypeer(remote.local())
1287 if remote_revs:
1287 if remote_revs:
1288 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1288 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1289 remote._repo = r
1289 remote._repo = r
1290 common, _in, hds = treediscovery.findcommonincoming(
1290 common, _in, hds = treediscovery.findcommonincoming(
1291 repo, remote, force=True, audit=data
1291 repo, remote, force=True, audit=data
1292 )
1292 )
1293 common = set(common)
1293 common = set(common)
1294 if not opts.get('nonheads'):
1294 if not opts.get('nonheads'):
1295 ui.writenoi18n(
1295 ui.writenoi18n(
1296 b"unpruned common: %s\n"
1296 b"unpruned common: %s\n"
1297 % b" ".join(sorted(short(n) for n in common))
1297 % b" ".join(sorted(short(n) for n in common))
1298 )
1298 )
1299
1299
1300 clnode = repo.changelog.node
1300 clnode = repo.changelog.node
1301 common = repo.revs(b'heads(::%ln)', common)
1301 common = repo.revs(b'heads(::%ln)', common)
1302 common = {clnode(r) for r in common}
1302 common = {clnode(r) for r in common}
1303 return common, hds
1303 return common, hds
1304
1304
1305 else:
1305 else:
1306
1306
1307 def doit(pushedrevs, remoteheads, remote=remote):
1307 def doit(pushedrevs, remoteheads, remote=remote):
1308 nodes = None
1308 nodes = None
1309 if pushedrevs:
1309 if pushedrevs:
1310 revs = logcmdutil.revrange(repo, pushedrevs)
1310 revs = logcmdutil.revrange(repo, pushedrevs)
1311 nodes = [repo[r].node() for r in revs]
1311 nodes = [repo[r].node() for r in revs]
1312 common, any, hds = setdiscovery.findcommonheads(
1312 common, any, hds = setdiscovery.findcommonheads(
1313 ui,
1313 ui,
1314 repo,
1314 repo,
1315 remote,
1315 remote,
1316 ancestorsof=nodes,
1316 ancestorsof=nodes,
1317 audit=data,
1317 audit=data,
1318 abortwhenunrelated=False,
1318 abortwhenunrelated=False,
1319 )
1319 )
1320 return common, hds
1320 return common, hds
1321
1321
1322 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1322 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1323 localrevs = opts['rev']
1323 localrevs = opts['rev']
1324
1324
1325 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1325 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1326 if fm.strict_format:
1326 if fm.strict_format:
1327
1327
1328 @contextlib.contextmanager
1328 @contextlib.contextmanager
1329 def may_capture_output():
1329 def may_capture_output():
1330 ui.pushbuffer()
1330 ui.pushbuffer()
1331 yield
1331 yield
1332 data[b'output'] = ui.popbuffer()
1332 data[b'output'] = ui.popbuffer()
1333
1333
1334 else:
1334 else:
1335 may_capture_output = util.nullcontextmanager
1335 may_capture_output = util.nullcontextmanager
1336 with may_capture_output():
1336 with may_capture_output():
1337 with util.timedcm('debug-discovery') as t:
1337 with util.timedcm('debug-discovery') as t:
1338 common, hds = doit(localrevs, remoterevs)
1338 common, hds = doit(localrevs, remoterevs)
1339
1339
1340 # compute all statistics
1340 # compute all statistics
1341 if len(common) == 1 and repo.nullid in common:
1341 if len(common) == 1 and repo.nullid in common:
1342 common = set()
1342 common = set()
1343 heads_common = set(common)
1343 heads_common = set(common)
1344 heads_remote = set(hds)
1344 heads_remote = set(hds)
1345 heads_local = set(repo.heads())
1345 heads_local = set(repo.heads())
1346 # note: they cannot be a local or remote head that is in common and not
1346 # note: they cannot be a local or remote head that is in common and not
1347 # itself a head of common.
1347 # itself a head of common.
1348 heads_common_local = heads_common & heads_local
1348 heads_common_local = heads_common & heads_local
1349 heads_common_remote = heads_common & heads_remote
1349 heads_common_remote = heads_common & heads_remote
1350 heads_common_both = heads_common & heads_remote & heads_local
1350 heads_common_both = heads_common & heads_remote & heads_local
1351
1351
1352 all = repo.revs(b'all()')
1352 all = repo.revs(b'all()')
1353 common = repo.revs(b'::%ln', common)
1353 common = repo.revs(b'::%ln', common)
1354 roots_common = repo.revs(b'roots(::%ld)', common)
1354 roots_common = repo.revs(b'roots(::%ld)', common)
1355 missing = repo.revs(b'not ::%ld', common)
1355 missing = repo.revs(b'not ::%ld', common)
1356 heads_missing = repo.revs(b'heads(%ld)', missing)
1356 heads_missing = repo.revs(b'heads(%ld)', missing)
1357 roots_missing = repo.revs(b'roots(%ld)', missing)
1357 roots_missing = repo.revs(b'roots(%ld)', missing)
1358 assert len(common) + len(missing) == len(all)
1358 assert len(common) + len(missing) == len(all)
1359
1359
1360 initial_undecided = repo.revs(
1360 initial_undecided = repo.revs(
1361 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1361 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1362 )
1362 )
1363 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1363 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1364 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1364 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1365 common_initial_undecided = initial_undecided & common
1365 common_initial_undecided = initial_undecided & common
1366 missing_initial_undecided = initial_undecided & missing
1366 missing_initial_undecided = initial_undecided & missing
1367
1367
1368 data[b'elapsed'] = t.elapsed
1368 data[b'elapsed'] = t.elapsed
1369 data[b'nb-common-heads'] = len(heads_common)
1369 data[b'nb-common-heads'] = len(heads_common)
1370 data[b'nb-common-heads-local'] = len(heads_common_local)
1370 data[b'nb-common-heads-local'] = len(heads_common_local)
1371 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1371 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1372 data[b'nb-common-heads-both'] = len(heads_common_both)
1372 data[b'nb-common-heads-both'] = len(heads_common_both)
1373 data[b'nb-common-roots'] = len(roots_common)
1373 data[b'nb-common-roots'] = len(roots_common)
1374 data[b'nb-head-local'] = len(heads_local)
1374 data[b'nb-head-local'] = len(heads_local)
1375 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1375 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1376 data[b'nb-head-remote'] = len(heads_remote)
1376 data[b'nb-head-remote'] = len(heads_remote)
1377 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1377 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1378 heads_common_remote
1378 heads_common_remote
1379 )
1379 )
1380 data[b'nb-revs'] = len(all)
1380 data[b'nb-revs'] = len(all)
1381 data[b'nb-revs-common'] = len(common)
1381 data[b'nb-revs-common'] = len(common)
1382 data[b'nb-revs-missing'] = len(missing)
1382 data[b'nb-revs-missing'] = len(missing)
1383 data[b'nb-missing-heads'] = len(heads_missing)
1383 data[b'nb-missing-heads'] = len(heads_missing)
1384 data[b'nb-missing-roots'] = len(roots_missing)
1384 data[b'nb-missing-roots'] = len(roots_missing)
1385 data[b'nb-ini_und'] = len(initial_undecided)
1385 data[b'nb-ini_und'] = len(initial_undecided)
1386 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1386 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1387 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1387 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1388 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1388 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1389 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1389 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1390
1390
1391 fm.startitem()
1391 fm.startitem()
1392 fm.data(**pycompat.strkwargs(data))
1392 fm.data(**pycompat.strkwargs(data))
1393 # display discovery summary
1393 # display discovery summary
1394 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1394 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1395 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1395 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1396 if b'total-round-trips-heads' in data:
1396 if b'total-round-trips-heads' in data:
1397 fm.plain(
1397 fm.plain(
1398 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1398 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1399 )
1399 )
1400 if b'total-round-trips-branches' in data:
1400 if b'total-round-trips-branches' in data:
1401 fm.plain(
1401 fm.plain(
1402 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1402 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1403 % data
1403 % data
1404 )
1404 )
1405 if b'total-round-trips-between' in data:
1405 if b'total-round-trips-between' in data:
1406 fm.plain(
1406 fm.plain(
1407 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1407 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1408 )
1408 )
1409 fm.plain(b"queries: %(total-queries)9d\n" % data)
1409 fm.plain(b"queries: %(total-queries)9d\n" % data)
1410 if b'total-queries-branches' in data:
1410 if b'total-queries-branches' in data:
1411 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1411 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1412 if b'total-queries-between' in data:
1412 if b'total-queries-between' in data:
1413 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1413 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1414 fm.plain(b"heads summary:\n")
1414 fm.plain(b"heads summary:\n")
1415 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1415 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1416 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1416 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1417 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1417 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1418 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1418 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1419 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1419 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1420 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1420 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1421 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1421 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1422 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1422 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1423 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1423 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1424 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1424 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1425 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1425 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1426 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1426 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1427 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1427 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1428 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1428 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1429 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1429 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1430 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1430 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1431 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1431 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1432 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1432 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1433 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1433 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1434 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1434 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1435 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1435 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1436 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1436 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1437
1437
1438 if ui.verbose:
1438 if ui.verbose:
1439 fm.plain(
1439 fm.plain(
1440 b"common heads: %s\n"
1440 b"common heads: %s\n"
1441 % b" ".join(sorted(short(n) for n in heads_common))
1441 % b" ".join(sorted(short(n) for n in heads_common))
1442 )
1442 )
1443 fm.end()
1443 fm.end()
1444
1444
1445
1445
1446 _chunksize = 4 << 10
1446 _chunksize = 4 << 10
1447
1447
1448
1448
1449 @command(
1449 @command(
1450 b'debugdownload',
1450 b'debugdownload',
1451 [
1451 [
1452 (b'o', b'output', b'', _(b'path')),
1452 (b'o', b'output', b'', _(b'path')),
1453 ],
1453 ],
1454 optionalrepo=True,
1454 optionalrepo=True,
1455 )
1455 )
1456 def debugdownload(ui, repo, url, output=None, **opts):
1456 def debugdownload(ui, repo, url, output=None, **opts):
1457 """download a resource using Mercurial logic and config"""
1457 """download a resource using Mercurial logic and config"""
1458 fh = urlmod.open(ui, url, output)
1458 fh = urlmod.open(ui, url, output)
1459
1459
1460 dest = ui
1460 dest = ui
1461 if output:
1461 if output:
1462 dest = open(output, b"wb", _chunksize)
1462 dest = open(output, b"wb", _chunksize)
1463 try:
1463 try:
1464 data = fh.read(_chunksize)
1464 data = fh.read(_chunksize)
1465 while data:
1465 while data:
1466 dest.write(data)
1466 dest.write(data)
1467 data = fh.read(_chunksize)
1467 data = fh.read(_chunksize)
1468 finally:
1468 finally:
1469 if output:
1469 if output:
1470 dest.close()
1470 dest.close()
1471
1471
1472
1472
1473 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1473 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1474 def debugextensions(ui, repo, **opts):
1474 def debugextensions(ui, repo, **opts):
1475 '''show information about active extensions'''
1475 '''show information about active extensions'''
1476 exts = extensions.extensions(ui)
1476 exts = extensions.extensions(ui)
1477 hgver = util.version()
1477 hgver = util.version()
1478 fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
1478 fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
1479 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1479 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1480 isinternal = extensions.ismoduleinternal(extmod)
1480 isinternal = extensions.ismoduleinternal(extmod)
1481 extsource = None
1481 extsource = None
1482
1482
1483 if hasattr(extmod, '__file__'):
1483 if hasattr(extmod, '__file__'):
1484 extsource = pycompat.fsencode(extmod.__file__)
1484 extsource = pycompat.fsencode(extmod.__file__)
1485 elif getattr(sys, 'oxidized', False):
1485 elif getattr(sys, 'oxidized', False):
1486 extsource = pycompat.sysexecutable
1486 extsource = pycompat.sysexecutable
1487 if isinternal:
1487 if isinternal:
1488 exttestedwith = [] # never expose magic string to users
1488 exttestedwith = [] # never expose magic string to users
1489 else:
1489 else:
1490 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1490 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1491 extbuglink = getattr(extmod, 'buglink', None)
1491 extbuglink = getattr(extmod, 'buglink', None)
1492
1492
1493 fm.startitem()
1493 fm.startitem()
1494
1494
1495 if ui.quiet or ui.verbose:
1495 if ui.quiet or ui.verbose:
1496 fm.write(b'name', b'%s\n', extname)
1496 fm.write(b'name', b'%s\n', extname)
1497 else:
1497 else:
1498 fm.write(b'name', b'%s', extname)
1498 fm.write(b'name', b'%s', extname)
1499 if isinternal or hgver in exttestedwith:
1499 if isinternal or hgver in exttestedwith:
1500 fm.plain(b'\n')
1500 fm.plain(b'\n')
1501 elif not exttestedwith:
1501 elif not exttestedwith:
1502 fm.plain(_(b' (untested!)\n'))
1502 fm.plain(_(b' (untested!)\n'))
1503 else:
1503 else:
1504 lasttestedversion = exttestedwith[-1]
1504 lasttestedversion = exttestedwith[-1]
1505 fm.plain(b' (%s!)\n' % lasttestedversion)
1505 fm.plain(b' (%s!)\n' % lasttestedversion)
1506
1506
1507 fm.condwrite(
1507 fm.condwrite(
1508 ui.verbose and extsource,
1508 ui.verbose and extsource,
1509 b'source',
1509 b'source',
1510 _(b' location: %s\n'),
1510 _(b' location: %s\n'),
1511 extsource or b"",
1511 extsource or b"",
1512 )
1512 )
1513
1513
1514 if ui.verbose:
1514 if ui.verbose:
1515 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1515 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1516 fm.data(bundled=isinternal)
1516 fm.data(bundled=isinternal)
1517
1517
1518 fm.condwrite(
1518 fm.condwrite(
1519 ui.verbose and exttestedwith,
1519 ui.verbose and exttestedwith,
1520 b'testedwith',
1520 b'testedwith',
1521 _(b' tested with: %s\n'),
1521 _(b' tested with: %s\n'),
1522 fm.formatlist(exttestedwith, name=b'ver'),
1522 fm.formatlist(exttestedwith, name=b'ver'),
1523 )
1523 )
1524
1524
1525 fm.condwrite(
1525 fm.condwrite(
1526 ui.verbose and extbuglink,
1526 ui.verbose and extbuglink,
1527 b'buglink',
1527 b'buglink',
1528 _(b' bug reporting: %s\n'),
1528 _(b' bug reporting: %s\n'),
1529 extbuglink or b"",
1529 extbuglink or b"",
1530 )
1530 )
1531
1531
1532 fm.end()
1532 fm.end()
1533
1533
1534
1534
1535 @command(
1535 @command(
1536 b'debugfileset',
1536 b'debugfileset',
1537 [
1537 [
1538 (
1538 (
1539 b'r',
1539 b'r',
1540 b'rev',
1540 b'rev',
1541 b'',
1541 b'',
1542 _(b'apply the filespec on this revision'),
1542 _(b'apply the filespec on this revision'),
1543 _(b'REV'),
1543 _(b'REV'),
1544 ),
1544 ),
1545 (
1545 (
1546 b'',
1546 b'',
1547 b'all-files',
1547 b'all-files',
1548 False,
1548 False,
1549 _(b'test files from all revisions and working directory'),
1549 _(b'test files from all revisions and working directory'),
1550 ),
1550 ),
1551 (
1551 (
1552 b's',
1552 b's',
1553 b'show-matcher',
1553 b'show-matcher',
1554 None,
1554 None,
1555 _(b'print internal representation of matcher'),
1555 _(b'print internal representation of matcher'),
1556 ),
1556 ),
1557 (
1557 (
1558 b'p',
1558 b'p',
1559 b'show-stage',
1559 b'show-stage',
1560 [],
1560 [],
1561 _(b'print parsed tree at the given stage'),
1561 _(b'print parsed tree at the given stage'),
1562 _(b'NAME'),
1562 _(b'NAME'),
1563 ),
1563 ),
1564 ],
1564 ],
1565 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1565 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1566 )
1566 )
1567 def debugfileset(ui, repo, expr, **opts):
1567 def debugfileset(ui, repo, expr, **opts):
1568 '''parse and apply a fileset specification'''
1568 '''parse and apply a fileset specification'''
1569 from . import fileset
1569 from . import fileset
1570
1570
1571 fileset.symbols # force import of fileset so we have predicates to optimize
1571 fileset.symbols # force import of fileset so we have predicates to optimize
1572
1572
1573 ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)
1573 ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)
1574
1574
1575 stages = [
1575 stages = [
1576 (b'parsed', pycompat.identity),
1576 (b'parsed', pycompat.identity),
1577 (b'analyzed', filesetlang.analyze),
1577 (b'analyzed', filesetlang.analyze),
1578 (b'optimized', filesetlang.optimize),
1578 (b'optimized', filesetlang.optimize),
1579 ]
1579 ]
1580 stagenames = {n for n, f in stages}
1580 stagenames = {n for n, f in stages}
1581
1581
1582 showalways = set()
1582 showalways = set()
1583 if ui.verbose and not opts['show_stage']:
1583 if ui.verbose and not opts['show_stage']:
1584 # show parsed tree by --verbose (deprecated)
1584 # show parsed tree by --verbose (deprecated)
1585 showalways.add(b'parsed')
1585 showalways.add(b'parsed')
1586 if opts['show_stage'] == [b'all']:
1586 if opts['show_stage'] == [b'all']:
1587 showalways.update(stagenames)
1587 showalways.update(stagenames)
1588 else:
1588 else:
1589 for n in opts['show_stage']:
1589 for n in opts['show_stage']:
1590 if n not in stagenames:
1590 if n not in stagenames:
1591 raise error.Abort(_(b'invalid stage name: %s') % n)
1591 raise error.Abort(_(b'invalid stage name: %s') % n)
1592 showalways.update(opts['show_stage'])
1592 showalways.update(opts['show_stage'])
1593
1593
1594 tree = filesetlang.parse(expr)
1594 tree = filesetlang.parse(expr)
1595 for n, f in stages:
1595 for n, f in stages:
1596 tree = f(tree)
1596 tree = f(tree)
1597 if n in showalways:
1597 if n in showalways:
1598 if opts['show_stage'] or n != b'parsed':
1598 if opts['show_stage'] or n != b'parsed':
1599 ui.write(b"* %s:\n" % n)
1599 ui.write(b"* %s:\n" % n)
1600 ui.write(filesetlang.prettyformat(tree), b"\n")
1600 ui.write(filesetlang.prettyformat(tree), b"\n")
1601
1601
1602 files = set()
1602 files = set()
1603 if opts['all_files']:
1603 if opts['all_files']:
1604 for r in repo:
1604 for r in repo:
1605 c = repo[r]
1605 c = repo[r]
1606 files.update(c.files())
1606 files.update(c.files())
1607 files.update(c.substate)
1607 files.update(c.substate)
1608 if opts['all_files'] or ctx.rev() is None:
1608 if opts['all_files'] or ctx.rev() is None:
1609 wctx = repo[None]
1609 wctx = repo[None]
1610 files.update(
1610 files.update(
1611 repo.dirstate.walk(
1611 repo.dirstate.walk(
1612 scmutil.matchall(repo),
1612 scmutil.matchall(repo),
1613 subrepos=list(wctx.substate),
1613 subrepos=list(wctx.substate),
1614 unknown=True,
1614 unknown=True,
1615 ignored=True,
1615 ignored=True,
1616 )
1616 )
1617 )
1617 )
1618 files.update(wctx.substate)
1618 files.update(wctx.substate)
1619 else:
1619 else:
1620 files.update(ctx.files())
1620 files.update(ctx.files())
1621 files.update(ctx.substate)
1621 files.update(ctx.substate)
1622
1622
1623 m = ctx.matchfileset(repo.getcwd(), expr)
1623 m = ctx.matchfileset(repo.getcwd(), expr)
1624 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
1624 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
1625 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1625 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1626 for f in sorted(files):
1626 for f in sorted(files):
1627 if not m(f):
1627 if not m(f):
1628 continue
1628 continue
1629 ui.write(b"%s\n" % f)
1629 ui.write(b"%s\n" % f)
1630
1630
1631
1631
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) is mutually exclusive with actually
    # repairing from a report or doing a dry run of the repair.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1704
1704
1705
1705
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # Column width: widest variant name, but never narrower than the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Pad each name so the value columns line up.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
    if fm.isplain():

        def formatvalue(value):
            # Bytes-like values are printed as-is; booleans become yes/no.
            if hasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so templates/color can highlight mismatches between
        # the repo's on-disk format, the config, and Mercurial's default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1775
1775
1776
1776
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # Probe case sensitivity by creating a temporary file in the target
        # directory; failure to create one leaves the answer as '(unknown)'.
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1799
1799
1800
1800
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name onto the on-disk bundle type.
    bundletype = opts.get('type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846
1846
1847
1847
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # The file itself is not ignored; check whether one of
                    # its parent directories is.
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1896
1896
1897
1897
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Some stores wrap a revlog; unwrap when possible.
    revlog = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1919
1919
1920
1920
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        # Only emit the second parent edge for merge revisions.
        if pp[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1940
1940
1941
1941
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Only the native (C/Rust) index implementations expose stats().
    if not hasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1951
1951
1952
1952
1953 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1953 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1954 def debuginstall(ui, **opts):
1954 def debuginstall(ui, **opts):
1955 """test Mercurial installation
1955 """test Mercurial installation
1956
1956
1957 Returns 0 on success.
1957 Returns 0 on success.
1958 """
1958 """
1959 problems = 0
1959 problems = 0
1960
1960
1961 fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
1961 fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
1962 fm.startitem()
1962 fm.startitem()
1963
1963
1964 # encoding might be unknown or wrong. don't translate these messages.
1964 # encoding might be unknown or wrong. don't translate these messages.
1965 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1965 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1966 err = None
1966 err = None
1967 try:
1967 try:
1968 codecs.lookup(pycompat.sysstr(encoding.encoding))
1968 codecs.lookup(pycompat.sysstr(encoding.encoding))
1969 except LookupError as inst:
1969 except LookupError as inst:
1970 err = stringutil.forcebytestr(inst)
1970 err = stringutil.forcebytestr(inst)
1971 problems += 1
1971 problems += 1
1972 fm.condwrite(
1972 fm.condwrite(
1973 err,
1973 err,
1974 b'encodingerror',
1974 b'encodingerror',
1975 b" %s\n (check that your locale is properly set)\n",
1975 b" %s\n (check that your locale is properly set)\n",
1976 err,
1976 err,
1977 )
1977 )
1978
1978
1979 # Python
1979 # Python
1980 pythonlib = None
1980 pythonlib = None
1981 if hasattr(os, '__file__'):
1981 if hasattr(os, '__file__'):
1982 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1982 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1983 elif getattr(sys, 'oxidized', False):
1983 elif getattr(sys, 'oxidized', False):
1984 pythonlib = pycompat.sysexecutable
1984 pythonlib = pycompat.sysexecutable
1985
1985
1986 fm.write(
1986 fm.write(
1987 b'pythonexe',
1987 b'pythonexe',
1988 _(b"checking Python executable (%s)\n"),
1988 _(b"checking Python executable (%s)\n"),
1989 pycompat.sysexecutable or _(b"unknown"),
1989 pycompat.sysexecutable or _(b"unknown"),
1990 )
1990 )
1991 fm.write(
1991 fm.write(
1992 b'pythonimplementation',
1992 b'pythonimplementation',
1993 _(b"checking Python implementation (%s)\n"),
1993 _(b"checking Python implementation (%s)\n"),
1994 pycompat.sysbytes(platform.python_implementation()),
1994 pycompat.sysbytes(platform.python_implementation()),
1995 )
1995 )
1996 fm.write(
1996 fm.write(
1997 b'pythonver',
1997 b'pythonver',
1998 _(b"checking Python version (%s)\n"),
1998 _(b"checking Python version (%s)\n"),
1999 (b"%d.%d.%d" % sys.version_info[:3]),
1999 (b"%d.%d.%d" % sys.version_info[:3]),
2000 )
2000 )
2001 fm.write(
2001 fm.write(
2002 b'pythonlib',
2002 b'pythonlib',
2003 _(b"checking Python lib (%s)...\n"),
2003 _(b"checking Python lib (%s)...\n"),
2004 pythonlib or _(b"unknown"),
2004 pythonlib or _(b"unknown"),
2005 )
2005 )
2006
2006
2007 try:
2007 try:
2008 from . import rustext # pytype: disable=import-error
2008 from . import rustext # pytype: disable=import-error
2009
2009
2010 rustext.__doc__ # trigger lazy import
2010 rustext.__doc__ # trigger lazy import
2011 except ImportError:
2011 except ImportError:
2012 rustext = None
2012 rustext = None
2013
2013
2014 security = set(sslutil.supportedprotocols)
2014 security = set(sslutil.supportedprotocols)
2015 if sslutil.hassni:
2015 if sslutil.hassni:
2016 security.add(b'sni')
2016 security.add(b'sni')
2017
2017
2018 fm.write(
2018 fm.write(
2019 b'pythonsecurity',
2019 b'pythonsecurity',
2020 _(b"checking Python security support (%s)\n"),
2020 _(b"checking Python security support (%s)\n"),
2021 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2021 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2022 )
2022 )
2023
2023
2024 # These are warnings, not errors. So don't increment problem count. This
2024 # These are warnings, not errors. So don't increment problem count. This
2025 # may change in the future.
2025 # may change in the future.
2026 if b'tls1.2' not in security:
2026 if b'tls1.2' not in security:
2027 fm.plain(
2027 fm.plain(
2028 _(
2028 _(
2029 b' TLS 1.2 not supported by Python install; '
2029 b' TLS 1.2 not supported by Python install; '
2030 b'network connections lack modern security\n'
2030 b'network connections lack modern security\n'
2031 )
2031 )
2032 )
2032 )
2033 if b'sni' not in security:
2033 if b'sni' not in security:
2034 fm.plain(
2034 fm.plain(
2035 _(
2035 _(
2036 b' SNI not supported by Python install; may have '
2036 b' SNI not supported by Python install; may have '
2037 b'connectivity issues with some servers\n'
2037 b'connectivity issues with some servers\n'
2038 )
2038 )
2039 )
2039 )
2040
2040
2041 fm.plain(
2041 fm.plain(
2042 _(
2042 _(
2043 b"checking Rust extensions (%s)\n"
2043 b"checking Rust extensions (%s)\n"
2044 % (b'missing' if rustext is None else b'installed')
2044 % (b'missing' if rustext is None else b'installed')
2045 ),
2045 ),
2046 )
2046 )
2047
2047
2048 # TODO print CA cert info
2048 # TODO print CA cert info
2049
2049
2050 # hg version
2050 # hg version
2051 hgver = util.version()
2051 hgver = util.version()
2052 fm.write(
2052 fm.write(
2053 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2053 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2054 )
2054 )
2055 fm.write(
2055 fm.write(
2056 b'hgverextra',
2056 b'hgverextra',
2057 _(b"checking Mercurial custom build (%s)\n"),
2057 _(b"checking Mercurial custom build (%s)\n"),
2058 b'+'.join(hgver.split(b'+')[1:]),
2058 b'+'.join(hgver.split(b'+')[1:]),
2059 )
2059 )
2060
2060
2061 # compiled modules
2061 # compiled modules
2062 hgmodules = None
2062 hgmodules = None
2063 if hasattr(sys.modules[__name__], '__file__'):
2063 if hasattr(sys.modules[__name__], '__file__'):
2064 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2064 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2065 elif getattr(sys, 'oxidized', False):
2065 elif getattr(sys, 'oxidized', False):
2066 hgmodules = pycompat.sysexecutable
2066 hgmodules = pycompat.sysexecutable
2067
2067
2068 fm.write(
2068 fm.write(
2069 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2069 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2070 )
2070 )
2071 fm.write(
2071 fm.write(
2072 b'hgmodules',
2072 b'hgmodules',
2073 _(b"checking installed modules (%s)...\n"),
2073 _(b"checking installed modules (%s)...\n"),
2074 hgmodules or _(b"unknown"),
2074 hgmodules or _(b"unknown"),
2075 )
2075 )
2076
2076
2077 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2077 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2078 rustext = rustandc # for now, that's the only case
2078 rustext = rustandc # for now, that's the only case
2079 cext = policy.policy in (b'c', b'allow') or rustandc
2079 cext = policy.policy in (b'c', b'allow') or rustandc
2080 nopure = cext or rustext
2080 nopure = cext or rustext
2081 if nopure:
2081 if nopure:
2082 err = None
2082 err = None
2083 try:
2083 try:
2084 if cext:
2084 if cext:
2085 from .cext import ( # pytype: disable=import-error
2085 from .cext import ( # pytype: disable=import-error
2086 base85,
2086 base85,
2087 bdiff,
2087 bdiff,
2088 mpatch,
2088 mpatch,
2089 osutil,
2089 osutil,
2090 )
2090 )
2091
2091
2092 # quiet pyflakes
2092 # quiet pyflakes
2093 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2093 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2094 if rustext:
2094 if rustext:
2095 from .rustext import ( # pytype: disable=import-error
2095 from .rustext import ( # pytype: disable=import-error
2096 ancestor,
2096 ancestor,
2097 dirstate,
2097 dirstate,
2098 )
2098 )
2099
2099
2100 dir(ancestor), dir(dirstate) # quiet pyflakes
2100 dir(ancestor), dir(dirstate) # quiet pyflakes
2101 except Exception as inst:
2101 except Exception as inst:
2102 err = stringutil.forcebytestr(inst)
2102 err = stringutil.forcebytestr(inst)
2103 problems += 1
2103 problems += 1
2104 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2104 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2105
2105
2106 compengines = util.compengines._engines.values()
2106 compengines = util.compengines._engines.values()
2107 fm.write(
2107 fm.write(
2108 b'compengines',
2108 b'compengines',
2109 _(b'checking registered compression engines (%s)\n'),
2109 _(b'checking registered compression engines (%s)\n'),
2110 fm.formatlist(
2110 fm.formatlist(
2111 sorted(e.name() for e in compengines),
2111 sorted(e.name() for e in compengines),
2112 name=b'compengine',
2112 name=b'compengine',
2113 fmt=b'%s',
2113 fmt=b'%s',
2114 sep=b', ',
2114 sep=b', ',
2115 ),
2115 ),
2116 )
2116 )
2117 fm.write(
2117 fm.write(
2118 b'compenginesavail',
2118 b'compenginesavail',
2119 _(b'checking available compression engines (%s)\n'),
2119 _(b'checking available compression engines (%s)\n'),
2120 fm.formatlist(
2120 fm.formatlist(
2121 sorted(e.name() for e in compengines if e.available()),
2121 sorted(e.name() for e in compengines if e.available()),
2122 name=b'compengine',
2122 name=b'compengine',
2123 fmt=b'%s',
2123 fmt=b'%s',
2124 sep=b', ',
2124 sep=b', ',
2125 ),
2125 ),
2126 )
2126 )
2127 wirecompengines = compression.compengines.supportedwireengines(
2127 wirecompengines = compression.compengines.supportedwireengines(
2128 compression.SERVERROLE
2128 compression.SERVERROLE
2129 )
2129 )
2130 fm.write(
2130 fm.write(
2131 b'compenginesserver',
2131 b'compenginesserver',
2132 _(
2132 _(
2133 b'checking available compression engines '
2133 b'checking available compression engines '
2134 b'for wire protocol (%s)\n'
2134 b'for wire protocol (%s)\n'
2135 ),
2135 ),
2136 fm.formatlist(
2136 fm.formatlist(
2137 [e.name() for e in wirecompengines if e.wireprotosupport()],
2137 [e.name() for e in wirecompengines if e.wireprotosupport()],
2138 name=b'compengine',
2138 name=b'compengine',
2139 fmt=b'%s',
2139 fmt=b'%s',
2140 sep=b', ',
2140 sep=b', ',
2141 ),
2141 ),
2142 )
2142 )
2143 re2 = b'missing'
2143 re2 = b'missing'
2144 if util.has_re2():
2144 if util.has_re2():
2145 re2 = b'available'
2145 re2 = b'available'
2146 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2146 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2147 fm.data(re2=bool(util._re2))
2147 fm.data(re2=bool(util._re2))
2148
2148
2149 # templates
2149 # templates
2150 p = templater.templatedir()
2150 p = templater.templatedir()
2151 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2151 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2152 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2152 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2153 if p:
2153 if p:
2154 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2154 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2155 if m:
2155 if m:
2156 # template found, check if it is working
2156 # template found, check if it is working
2157 err = None
2157 err = None
2158 try:
2158 try:
2159 templater.templater.frommapfile(m)
2159 templater.templater.frommapfile(m)
2160 except Exception as inst:
2160 except Exception as inst:
2161 err = stringutil.forcebytestr(inst)
2161 err = stringutil.forcebytestr(inst)
2162 p = None
2162 p = None
2163 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2163 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2164 else:
2164 else:
2165 p = None
2165 p = None
2166 fm.condwrite(
2166 fm.condwrite(
2167 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2167 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2168 )
2168 )
2169 fm.condwrite(
2169 fm.condwrite(
2170 not m,
2170 not m,
2171 b'defaulttemplatenotfound',
2171 b'defaulttemplatenotfound',
2172 _(b" template '%s' not found\n"),
2172 _(b" template '%s' not found\n"),
2173 b"default",
2173 b"default",
2174 )
2174 )
2175 if not p:
2175 if not p:
2176 problems += 1
2176 problems += 1
2177 fm.condwrite(
2177 fm.condwrite(
2178 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2178 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2179 )
2179 )
2180
2180
2181 # editor
2181 # editor
2182 editor = ui.geteditor()
2182 editor = ui.geteditor()
2183 editor = util.expandpath(editor)
2183 editor = util.expandpath(editor)
2184 editorbin = procutil.shellsplit(editor)[0]
2184 editorbin = procutil.shellsplit(editor)[0]
2185 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2185 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2186 cmdpath = procutil.findexe(editorbin)
2186 cmdpath = procutil.findexe(editorbin)
2187 fm.condwrite(
2187 fm.condwrite(
2188 not cmdpath and editor == b'vi',
2188 not cmdpath and editor == b'vi',
2189 b'vinotfound',
2189 b'vinotfound',
2190 _(
2190 _(
2191 b" No commit editor set and can't find %s in PATH\n"
2191 b" No commit editor set and can't find %s in PATH\n"
2192 b" (specify a commit editor in your configuration"
2192 b" (specify a commit editor in your configuration"
2193 b" file)\n"
2193 b" file)\n"
2194 ),
2194 ),
2195 not cmdpath and editor == b'vi' and editorbin,
2195 not cmdpath and editor == b'vi' and editorbin,
2196 )
2196 )
2197 fm.condwrite(
2197 fm.condwrite(
2198 not cmdpath and editor != b'vi',
2198 not cmdpath and editor != b'vi',
2199 b'editornotfound',
2199 b'editornotfound',
2200 _(
2200 _(
2201 b" Can't find editor '%s' in PATH\n"
2201 b" Can't find editor '%s' in PATH\n"
2202 b" (specify a commit editor in your configuration"
2202 b" (specify a commit editor in your configuration"
2203 b" file)\n"
2203 b" file)\n"
2204 ),
2204 ),
2205 not cmdpath and editorbin,
2205 not cmdpath and editorbin,
2206 )
2206 )
2207 if not cmdpath and editor != b'vi':
2207 if not cmdpath and editor != b'vi':
2208 problems += 1
2208 problems += 1
2209
2209
2210 # check username
2210 # check username
2211 username = None
2211 username = None
2212 err = None
2212 err = None
2213 try:
2213 try:
2214 username = ui.username()
2214 username = ui.username()
2215 except error.Abort as e:
2215 except error.Abort as e:
2216 err = e.message
2216 err = e.message
2217 problems += 1
2217 problems += 1
2218
2218
2219 fm.condwrite(
2219 fm.condwrite(
2220 username, b'username', _(b"checking username (%s)\n"), username
2220 username, b'username', _(b"checking username (%s)\n"), username
2221 )
2221 )
2222 fm.condwrite(
2222 fm.condwrite(
2223 err,
2223 err,
2224 b'usernameerror',
2224 b'usernameerror',
2225 _(
2225 _(
2226 b"checking username...\n %s\n"
2226 b"checking username...\n %s\n"
2227 b" (specify a username in your configuration file)\n"
2227 b" (specify a username in your configuration file)\n"
2228 ),
2228 ),
2229 err,
2229 err,
2230 )
2230 )
2231
2231
2232 for name, mod in extensions.extensions():
2232 for name, mod in extensions.extensions():
2233 handler = getattr(mod, 'debuginstall', None)
2233 handler = getattr(mod, 'debuginstall', None)
2234 if handler is not None:
2234 if handler is not None:
2235 problems += handler(ui, fm)
2235 problems += handler(ui, fm)
2236
2236
2237 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2237 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2238 if not problems:
2238 if not problems:
2239 fm.data(problems=problems)
2239 fm.data(problems=problems)
2240 fm.condwrite(
2240 fm.condwrite(
2241 problems,
2241 problems,
2242 b'problems',
2242 b'problems',
2243 _(b"%d problems detected, please check your install!\n"),
2243 _(b"%d problems detected, please check your install!\n"),
2244 problems,
2244 problems,
2245 )
2245 )
2246 fm.end()
2246 fm.end()
2247
2247
2248 return problems
2248 return problems
2249
2249
2250
2250
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Ask the peer about all nodes in one round trip, then render the
    # boolean answers as a compact 0/1 string.
    known = peer.known([bin(nodeid) for nodeid in ids])
    ui.write(b"%s\n" % b"".join(b"1" if flag else b"0" for flag in known))
2263
2263
2264
2264
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias; all real work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2269
2269
2270
2270
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) and exit.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire without waiting (False) and hold
    # until the user interrupts; the finally clause guarantees release.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report who holds each lock and for how long.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Probe by trying to take the lock ourselves; this causes stale
        # locks to get reaped for more accurate reporting.
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got it, so nobody else held it; release immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # Lock contents look like "host:pid"; show the host only
                # when the lock was taken on a different machine.
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock vanished between the probe and the stat: treat as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2393
2393
2394
2394
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog
        # manifest storage; other storage backends may not expose it.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No action flag given: dump the current cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2468
2468
2469
2469
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    # Default template renders the commits, per-file state, and extras
    # collected below; users may override it via -T/--template.
    if not opts['template']:
        opts['template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
    fm.startitem()

    # "commits" section: the two sides of the merge with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per tracked file, with fields depending on
    # whether the record is a content conflict or a path (rename) conflict.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": extras for files not present in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2576
2576
2577
2577
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names get special treatment: historically only open branches
    # were listed, so skip the generic 'branches' namespace here and pull
    # the open ones from the branchmap below.
    for nsname, ns in repo.names.items():
        if nsname != b'branches':
            candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # No prefix arguments means "complete everything".
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2600
2600
2601
2601
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    # Pick the target revlog: an explicit FILE conflicts with -c/-m/--dir;
    # with neither given, default to the changelog.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    # Unwrap manifest/filelog storage objects down to the raw revlog.
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        # Prefer the index's own serializer when available (e.g. the Rust
        # index exposes nodemap_data_all); fall back to the pure path.
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        # Print the docket fields describing the on-disk nodemap file.
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2671
2671
2672
2672
2673 @command(
2673 @command(
2674 b'debugobsolete',
2674 b'debugobsolete',
2675 [
2675 [
2676 (b'', b'flags', 0, _(b'markers flag')),
2676 (b'', b'flags', 0, _(b'markers flag')),
2677 (
2677 (
2678 b'',
2678 b'',
2679 b'record-parents',
2679 b'record-parents',
2680 False,
2680 False,
2681 _(b'record parent information for the precursor'),
2681 _(b'record parent information for the precursor'),
2682 ),
2682 ),
2683 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2683 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2684 (
2684 (
2685 b'',
2685 b'',
2686 b'exclusive',
2686 b'exclusive',
2687 False,
2687 False,
2688 _(b'restrict display to markers only relevant to REV'),
2688 _(b'restrict display to markers only relevant to REV'),
2689 ),
2689 ),
2690 (b'', b'index', False, _(b'display index of the marker')),
2690 (b'', b'index', False, _(b'display index of the marker')),
2691 (b'', b'delete', [], _(b'delete markers specified by indices')),
2691 (b'', b'delete', [], _(b'delete markers specified by indices')),
2692 ]
2692 ]
2693 + cmdutil.commitopts2
2693 + cmdutil.commitopts2
2694 + cmdutil.formatteropts,
2694 + cmdutil.formatteropts,
2695 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2695 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2696 )
2696 )
2697 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2697 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2698 """create arbitrary obsolete marker
2698 """create arbitrary obsolete marker
2699
2699
2700 With no arguments, displays the list of obsolescence markers."""
2700 With no arguments, displays the list of obsolescence markers."""
2701
2701
2702 def parsenodeid(s):
2702 def parsenodeid(s):
2703 try:
2703 try:
2704 # We do not use revsingle/revrange functions here to accept
2704 # We do not use revsingle/revrange functions here to accept
2705 # arbitrary node identifiers, possibly not present in the
2705 # arbitrary node identifiers, possibly not present in the
2706 # local repository.
2706 # local repository.
2707 n = bin(s)
2707 n = bin(s)
2708 if len(n) != repo.nodeconstants.nodelen:
2708 if len(n) != repo.nodeconstants.nodelen:
2709 raise ValueError
2709 raise ValueError
2710 return n
2710 return n
2711 except ValueError:
2711 except ValueError:
2712 raise error.InputError(
2712 raise error.InputError(
2713 b'changeset references must be full hexadecimal '
2713 b'changeset references must be full hexadecimal '
2714 b'node identifiers'
2714 b'node identifiers'
2715 )
2715 )
2716
2716
2717 if opts.get('delete'):
2717 if opts.get('delete'):
2718 indices = []
2718 indices = []
2719 for v in opts.get('delete'):
2719 for v in opts.get('delete'):
2720 try:
2720 try:
2721 indices.append(int(v))
2721 indices.append(int(v))
2722 except ValueError:
2722 except ValueError:
2723 raise error.InputError(
2723 raise error.InputError(
2724 _(b'invalid index value: %r') % v,
2724 _(b'invalid index value: %r') % v,
2725 hint=_(b'use integers for indices'),
2725 hint=_(b'use integers for indices'),
2726 )
2726 )
2727
2727
2728 if repo.currenttransaction():
2728 if repo.currenttransaction():
2729 raise error.Abort(
2729 raise error.Abort(
2730 _(b'cannot delete obsmarkers in the middle of transaction.')
2730 _(b'cannot delete obsmarkers in the middle of transaction.')
2731 )
2731 )
2732
2732
2733 with repo.lock():
2733 with repo.lock():
2734 n = repair.deleteobsmarkers(repo.obsstore, indices)
2734 n = repair.deleteobsmarkers(repo.obsstore, indices)
2735 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2735 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2736
2736
2737 return
2737 return
2738
2738
2739 if precursor is not None:
2739 if precursor is not None:
2740 if opts['rev']:
2740 if opts['rev']:
2741 raise error.InputError(
2741 raise error.InputError(
2742 b'cannot select revision when creating marker'
2742 b'cannot select revision when creating marker'
2743 )
2743 )
2744 metadata = {}
2744 metadata = {}
2745 metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
2745 metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
2746 succs = tuple(parsenodeid(succ) for succ in successors)
2746 succs = tuple(parsenodeid(succ) for succ in successors)
2747 l = repo.lock()
2747 l = repo.lock()
2748 try:
2748 try:
2749 tr = repo.transaction(b'debugobsolete')
2749 tr = repo.transaction(b'debugobsolete')
2750 try:
2750 try:
2751 date = opts.get('date')
2751 date = opts.get('date')
2752 if date:
2752 if date:
2753 date = dateutil.parsedate(date)
2753 date = dateutil.parsedate(date)
2754 else:
2754 else:
2755 date = None
2755 date = None
2756 prec = parsenodeid(precursor)
2756 prec = parsenodeid(precursor)
2757 parents = None
2757 parents = None
2758 if opts['record_parents']:
2758 if opts['record_parents']:
2759 if prec not in repo.unfiltered():
2759 if prec not in repo.unfiltered():
2760 raise error.Abort(
2760 raise error.Abort(
2761 b'cannot used --record-parents on '
2761 b'cannot used --record-parents on '
2762 b'unknown changesets'
2762 b'unknown changesets'
2763 )
2763 )
2764 parents = repo.unfiltered()[prec].parents()
2764 parents = repo.unfiltered()[prec].parents()
2765 parents = tuple(p.node() for p in parents)
2765 parents = tuple(p.node() for p in parents)
2766 repo.obsstore.create(
2766 repo.obsstore.create(
2767 tr,
2767 tr,
2768 prec,
2768 prec,
2769 succs,
2769 succs,
2770 opts['flags'],
2770 opts['flags'],
2771 parents=parents,
2771 parents=parents,
2772 date=date,
2772 date=date,
2773 metadata=metadata,
2773 metadata=metadata,
2774 ui=ui,
2774 ui=ui,
2775 )
2775 )
2776 tr.close()
2776 tr.close()
2777 except ValueError as exc:
2777 except ValueError as exc:
2778 raise error.Abort(
2778 raise error.Abort(
2779 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2779 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2780 )
2780 )
2781 finally:
2781 finally:
2782 tr.release()
2782 tr.release()
2783 finally:
2783 finally:
2784 l.release()
2784 l.release()
2785 else:
2785 else:
2786 if opts['rev']:
2786 if opts['rev']:
2787 revs = logcmdutil.revrange(repo, opts['rev'])
2787 revs = logcmdutil.revrange(repo, opts['rev'])
2788 nodes = [repo[r].node() for r in revs]
2788 nodes = [repo[r].node() for r in revs]
2789 markers = list(
2789 markers = list(
2790 obsutil.getmarkers(
2790 obsutil.getmarkers(
2791 repo, nodes=nodes, exclusive=opts['exclusive']
2791 repo, nodes=nodes, exclusive=opts['exclusive']
2792 )
2792 )
2793 )
2793 )
2794 markers.sort(key=lambda x: x._data)
2794 markers.sort(key=lambda x: x._data)
2795 else:
2795 else:
2796 markers = obsutil.getmarkers(repo)
2796 markers = obsutil.getmarkers(repo)
2797
2797
2798 markerstoiter = markers
2798 markerstoiter = markers
2799 isrelevant = lambda m: True
2799 isrelevant = lambda m: True
2800 if opts.get('rev') and opts.get('index'):
2800 if opts.get('rev') and opts.get('index'):
2801 markerstoiter = obsutil.getmarkers(repo)
2801 markerstoiter = obsutil.getmarkers(repo)
2802 markerset = set(markers)
2802 markerset = set(markers)
2803 isrelevant = lambda m: m in markerset
2803 isrelevant = lambda m: m in markerset
2804
2804
2805 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
2805 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
2806 for i, m in enumerate(markerstoiter):
2806 for i, m in enumerate(markerstoiter):
2807 if not isrelevant(m):
2807 if not isrelevant(m):
2808 # marker can be irrelevant when we're iterating over a set
2808 # marker can be irrelevant when we're iterating over a set
2809 # of markers (markerstoiter) which is bigger than the set
2809 # of markers (markerstoiter) which is bigger than the set
2810 # of markers we want to display (markers)
2810 # of markers we want to display (markers)
2811 # this can happen if both --index and --rev options are
2811 # this can happen if both --index and --rev options are
2812 # provided and thus we need to iterate over all of the markers
2812 # provided and thus we need to iterate over all of the markers
2813 # to get the correct indices, but only display the ones that
2813 # to get the correct indices, but only display the ones that
2814 # are relevant to --rev value
2814 # are relevant to --rev value
2815 continue
2815 continue
2816 fm.startitem()
2816 fm.startitem()
2817 ind = i if opts.get('index') else None
2817 ind = i if opts.get('index') else None
2818 cmdutil.showmarker(fm, m, index=ind)
2818 cmdutil.showmarker(fm, m, index=ind)
2819 fm.end()
2819 fm.end()
2820
2820
2821
2821
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # No --rev means the working directory context (default=None).
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2833
2833
2834
2834
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # No --rev means the working directory context (default=None).
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    copymap = ctx.p2copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2846
2846
2847
2847
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def matches(path, states):
        # Resolve the request against the cwd and make sure the result
        # actually lives inside this repository.
        ds = repo.dirstate
        absspec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootprefix = repo.root + pycompat.ossep
        if absspec != repo.root and not absspec.startswith(rootprefix):
            return [], []
        if os.path.isdir(absspec):
            absspec += b'/'
        absspec = absspec[len(rootprefix) :]
        # dirstate paths always use '/'; translate on platforms where the
        # OS separator differs.
        needfix = pycompat.ossep != b'/'
        if needfix:
            absspec = absspec.replace(pycompat.ossep, b'/')
        prefixlen = len(absspec)
        wantfull = opts['full']
        filenames = set()
        dirnames = set()
        for fname, entry in ds.items():
            if not (fname.startswith(absspec) and entry.state in states):
                continue
            if needfix:
                fname = fname.replace(b'/', pycompat.ossep)
            if wantfull:
                filenames.add(fname)
                continue
            # Without --full, stop at the next path separator and offer
            # the directory component instead of the whole path.
            sep = fname.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                dirnames.add(fname[:sep])
            else:
                filenames.add(fname)
        return filenames, dirnames

    # Build the set of acceptable dirstate states from the flags; empty
    # means "no filter", which falls back to b'nmar' below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    allfiles = set()
    alldirs = set()
    for spec in specs:
        f, d = matches(spec, acceptable or b'nmar')
        allfiles.update(f)
        alldirs.update(d)
    allfiles.update(alldirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(allfiles)))
    ui.write(b'\n')
2916
2916
2917
2917
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # The matcher is built against the first revision's context.
    matcher = scmutil.match(ctx1, pats, opts)
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2931
2931
2932
2932
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Turn on peer request logging unconditionally; the messages are only
    # displayed when --debug is in effect.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        peer.close()
2956
2956
2957
2957
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    overrides = {}
    if opts['tool']:
        overrides[(b'ui', b'forcemerge')] = opts['tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # _picktool is chatty; suppress its output unless --debug.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3041
3041
3042
3042
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
            return

        # Update mode: attempt the compare-and-set push.
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        # Shell convention: 0 on success, so invert the boolean result.
        return not r
    finally:
        target.close()
3078
3078
3079
3079
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the parent vectors of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Fix: ``rel`` used to be assigned only inside the if/elif chain, so a
    # pvec pair matching none of the four relations raised a NameError at
    # the final ui.write.  Default to b'?' to keep the output well-formed.
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3106
3106
3107
3107
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See the command docstring for what --minimal does.
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Tracked in the manifest but unknown to the dirstate.
            onlymanifest = inmanifest - indirstate
            # Known to the dirstate but absent from the manifest; of those,
            # keep only entries that are not pending adds.
            onlydirstate = indirstate - inmanifest
            notadded = {
                f
                for f in onlydirstate
                if not dirstate.get_entry(f).added
            }
            changedfiles = onlymanifest | notadded

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3159
3159
3160
3160
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    only_data = opts.get("only_data")
    repair.rebuildfncache(ui, repo, only_data)
3176
3176
3177
3177
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
    for abs in ctx.walk(matcher):
        fctx = ctx[abs]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, renamed[0], hex(renamed[1]))
            )
3196
3196
3197
3197
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3203
3203
3204
3204
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    # ``opts`` uses native (str) keys; openrevlog still expects the
    # bytes-keyed form, so convert at the call boundary only.
    revlog = cmdutil.openrevlog(
        repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
    )

    if opts.get("dump"):
        revlog_debug.dump(ui, revlog)
    else:
        revlog_debug.debug_revlog(ui, revlog)
    return 0
3221
3222
3222
3223
3223 @command(
3224 @command(
3224 b'debugrevlogindex',
3225 b'debugrevlogindex',
3225 cmdutil.debugrevlogopts
3226 cmdutil.debugrevlogopts
3226 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3227 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3227 _(b'[-f FORMAT] -c|-m|FILE'),
3228 _(b'[-f FORMAT] -c|-m|FILE'),
3228 optionalrepo=True,
3229 optionalrepo=True,
3229 )
3230 )
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full hashes with --debug, short (12-char) hashes otherwise.
    shortfn = hex if ui.debugflag else short

    # Size the node-id column from the first entry; the revlog may be
    # empty, so fall back to a sane default width.
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Build the header once, then emit it.
    if format == 0:
        if ui.verbose:
            header = b" rev offset length linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
        else:
            header = b" rev linkrev %s %s p2\n" % (
                b"nodeid".ljust(idlen),
                b"p1".ljust(idlen),
            )
    elif format == 1:
        if ui.verbose:
            header = (
                b" rev flag offset length size link p1"
                b" p2 %s\n"
            ) % b"nodeid".rjust(idlen)
        else:
            header = (
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )
    ui.writenoi18n(header)

    for i in r:
        node = r.node(i)
        if format == 0:
            # Parent lookup can fail on damaged revlogs; show null
            # parents rather than aborting the dump.
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3337
3337
3338
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order.  Each stage transforms the
    # tree produced by the previous one; the stage order matters.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering every intermediate tree so that
    # --verify-optimized can compare stages afterwards.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two rev lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3469
3470
3470
3471
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    wireprotoserver.sshserver(ui, repo, logfh=logfh).serve_forever()
3519
3520
3520
3521
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions up front; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent pointers are rewritten; nothing else moves.
    with repo.wlock():
        repo.setparents(node1, node2)
3548
3549
3549
3550
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the positional FILE
    # argument actually carries the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3578
3579
3579
3580
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit context instead.  Verification is deliberately
    # disabled because we only want to fetch the peer's certificate chain,
    # not validate it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3649
3650
3650
3651
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    head_rev = logcmdutil.revsingle(repo, revspec).rev()
    displayer = logcmdutil.maketemplater(ui, repo, template)
    # Walk the ancestors of the requested revision in stable-tail order
    # and render each one through the template.
    ordered = stabletailsort._stable_tail_sort_naive(repo.changelog, head_rev)
    for ancestor_rev in ordered:
        displayer.show(repo[ancestor_rev])
3673
3674
3674
3675
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    head_rev = logcmdutil.revsingle(repo, rspec).rev()

    # --specific narrows the output to "specific" leaps only.
    if specific:
        leap_finder = stabletailsort._find_specific_leaps_naive
    else:
        leap_finder = stabletailsort._find_all_leaps_naive

    displayer = logcmdutil.maketemplater(ui, repo, template)
    # Each leap is rendered as a source/target pair followed by a newline.
    for source, target in leap_finder(repo.changelog, head_rev):
        displayer.show(repo[source])
        displayer.show(repo[target])
        ui.write(b'\n')
3703
3704
3704
3705
3705 @command(
3706 @command(
3706 b"debugbackupbundle",
3707 b"debugbackupbundle",
3707 [
3708 [
3708 (
3709 (
3709 b"",
3710 b"",
3710 b"recover",
3711 b"recover",
3711 b"",
3712 b"",
3712 b"brings the specified changeset back into the repository",
3713 b"brings the specified changeset back into the repository",
3713 )
3714 )
3714 ]
3715 ]
3715 + cmdutil.logopts,
3716 + cmdutil.logopts,
3716 _(b"hg debugbackupbundle [--recover HASH]"),
3717 _(b"hg debugbackupbundle [--recover HASH]"),
3717 )
3718 )
3718 def debugbackupbundle(ui, repo, *pats, **opts):
3719 def debugbackupbundle(ui, repo, *pats, **opts):
3719 """lists the changesets available in backup bundles
3720 """lists the changesets available in backup bundles
3720
3721
3721 Without any arguments, this command prints a list of the changesets in each
3722 Without any arguments, this command prints a list of the changesets in each
3722 backup bundle.
3723 backup bundle.
3723
3724
3724 --recover takes a changeset hash and unbundles the first bundle that
3725 --recover takes a changeset hash and unbundles the first bundle that
3725 contains that hash, which puts that changeset back in your repository.
3726 contains that hash, which puts that changeset back in your repository.
3726
3727
3727 --verbose will print the entire commit message and the bundle path for that
3728 --verbose will print the entire commit message and the bundle path for that
3728 backup.
3729 backup.
3729 """
3730 """
3730 backups = list(
3731 backups = list(
3731 filter(
3732 filter(
3732 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3733 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3733 )
3734 )
3734 )
3735 )
3735 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3736 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3736
3737
3737 opts = pycompat.byteskwargs(opts)
3738 opts = pycompat.byteskwargs(opts)
3738 opts[b"bundle"] = b""
3739 opts[b"bundle"] = b""
3739 opts[b"force"] = None
3740 opts[b"force"] = None
3740 limit = logcmdutil.getlimit(opts)
3741 limit = logcmdutil.getlimit(opts)
3741
3742
3742 def display(other, chlist, displayer):
3743 def display(other, chlist, displayer):
3743 if opts.get(b"newest_first"):
3744 if opts.get(b"newest_first"):
3744 chlist.reverse()
3745 chlist.reverse()
3745 count = 0
3746 count = 0
3746 for n in chlist:
3747 for n in chlist:
3747 if limit is not None and count >= limit:
3748 if limit is not None and count >= limit:
3748 break
3749 break
3749 parents = [
3750 parents = [
3750 True for p in other.changelog.parents(n) if p != repo.nullid
3751 True for p in other.changelog.parents(n) if p != repo.nullid
3751 ]
3752 ]
3752 if opts.get(b"no_merges") and len(parents) == 2:
3753 if opts.get(b"no_merges") and len(parents) == 2:
3753 continue
3754 continue
3754 count += 1
3755 count += 1
3755 displayer.show(other[n])
3756 displayer.show(other[n])
3756
3757
3757 recovernode = opts.get(b"recover")
3758 recovernode = opts.get(b"recover")
3758 if recovernode:
3759 if recovernode:
3759 if scmutil.isrevsymbol(repo, recovernode):
3760 if scmutil.isrevsymbol(repo, recovernode):
3760 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3761 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3761 return
3762 return
3762 elif backups:
3763 elif backups:
3763 msg = _(
3764 msg = _(
3764 b"Recover changesets using: hg debugbackupbundle --recover "
3765 b"Recover changesets using: hg debugbackupbundle --recover "
3765 b"<changeset hash>\n\nAvailable backup changesets:"
3766 b"<changeset hash>\n\nAvailable backup changesets:"
3766 )
3767 )
3767 ui.status(msg, label=b"status.removed")
3768 ui.status(msg, label=b"status.removed")
3768 else:
3769 else:
3769 ui.status(_(b"no backup changesets found\n"))
3770 ui.status(_(b"no backup changesets found\n"))
3770 return
3771 return
3771
3772
3772 for backup in backups:
3773 for backup in backups:
3773 # Much of this is copied from the hg incoming logic
3774 # Much of this is copied from the hg incoming logic
3774 source = os.path.relpath(backup, encoding.getcwd())
3775 source = os.path.relpath(backup, encoding.getcwd())
3775 path = urlutil.get_unique_pull_path_obj(
3776 path = urlutil.get_unique_pull_path_obj(
3776 b'debugbackupbundle',
3777 b'debugbackupbundle',
3777 ui,
3778 ui,
3778 source,
3779 source,
3779 )
3780 )
3780 try:
3781 try:
3781 other = hg.peer(repo, opts, path)
3782 other = hg.peer(repo, opts, path)
3782 except error.LookupError as ex:
3783 except error.LookupError as ex:
3783 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3784 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3784 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3785 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3785 ui.warn(msg, hint=hint)
3786 ui.warn(msg, hint=hint)
3786 continue
3787 continue
3787 branches = (path.branch, opts.get(b'branch', []))
3788 branches = (path.branch, opts.get(b'branch', []))
3788 revs, checkout = hg.addbranchrevs(
3789 revs, checkout = hg.addbranchrevs(
3789 repo, other, branches, opts.get(b"rev")
3790 repo, other, branches, opts.get(b"rev")
3790 )
3791 )
3791
3792
3792 if revs:
3793 if revs:
3793 revs = [other.lookup(rev) for rev in revs]
3794 revs = [other.lookup(rev) for rev in revs]
3794
3795
3795 with ui.silent():
3796 with ui.silent():
3796 try:
3797 try:
3797 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3798 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3798 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3799 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3799 )
3800 )
3800 except error.LookupError:
3801 except error.LookupError:
3801 continue
3802 continue
3802
3803
3803 try:
3804 try:
3804 if not chlist:
3805 if not chlist:
3805 continue
3806 continue
3806 if recovernode:
3807 if recovernode:
3807 with repo.lock(), repo.transaction(b"unbundle") as tr:
3808 with repo.lock(), repo.transaction(b"unbundle") as tr:
3808 if scmutil.isrevsymbol(other, recovernode):
3809 if scmutil.isrevsymbol(other, recovernode):
3809 ui.status(_(b"Unbundling %s\n") % (recovernode))
3810 ui.status(_(b"Unbundling %s\n") % (recovernode))
3810 f = hg.openpath(ui, path.loc)
3811 f = hg.openpath(ui, path.loc)
3811 gen = exchange.readbundle(ui, f, path.loc)
3812 gen = exchange.readbundle(ui, f, path.loc)
3812 if isinstance(gen, bundle2.unbundle20):
3813 if isinstance(gen, bundle2.unbundle20):
3813 bundle2.applybundle(
3814 bundle2.applybundle(
3814 repo,
3815 repo,
3815 gen,
3816 gen,
3816 tr,
3817 tr,
3817 source=b"unbundle",
3818 source=b"unbundle",
3818 url=b"bundle:" + path.loc,
3819 url=b"bundle:" + path.loc,
3819 )
3820 )
3820 else:
3821 else:
3821 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3822 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3822 break
3823 break
3823 else:
3824 else:
3824 backupdate = encoding.strtolocal(
3825 backupdate = encoding.strtolocal(
3825 time.strftime(
3826 time.strftime(
3826 "%a %H:%M, %Y-%m-%d",
3827 "%a %H:%M, %Y-%m-%d",
3827 time.localtime(os.path.getmtime(path.loc)),
3828 time.localtime(os.path.getmtime(path.loc)),
3828 )
3829 )
3829 )
3830 )
3830 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3831 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3831 if ui.verbose:
3832 if ui.verbose:
3832 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3833 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3833 else:
3834 else:
3834 opts[
3835 opts[
3835 b"template"
3836 b"template"
3836 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3837 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3837 displayer = logcmdutil.changesetdisplayer(
3838 displayer = logcmdutil.changesetdisplayer(
3838 ui, other, opts, False
3839 ui, other, opts, False
3839 )
3840 )
3840 display(other, chlist, displayer)
3841 display(other, chlist, displayer)
3841 displayer.close()
3842 displayer.close()
3842 finally:
3843 finally:
3843 cleanupfn()
3844 cleanupfn()
3844
3845
3845
3846
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the raw substate of a changeset: one record per subrepo path,
    # showing its source URL-ish spec and the pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        # state[0] is the subrepo source, state[1] the pinned revision.
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3857
3858
3858
3859
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Namespace exposed to the interpreter / evaluated command.
    local_ns = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter.  Manually set up
    # the pieces site would normally provide so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if not command:
        # No -c given: drop into an interactive REPL.
        code.interact(local=local_ns)
        return

    # -c given: compile and run the one-shot program, then exit.
    compiled = code.compile_command(encoding.strfromlocal(command))
    code.InteractiveInterpreter(locals=local_ns).runcode(compiled)
3908
3909
3909
3910
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    # Use native-str kwargs directly instead of round-tripping the whole
    # dict through pycompat.byteskwargs() (same migration as debugrevlog).
    changelog = opts["changelog"]
    manifest = opts["manifest"]
    filelogs = opts["filelogs"]

    # No explicit selection means "show all of them".
    if changelog is None and manifest is None and filelogs is None:
        changelog = True
        manifest = True
        filelogs = True

    # Use the unfiltered repo so every revlog revision is considered.
    repo = repo.unfiltered()
    # ui.formatter() still expects bytes-keyed options; convert only here.
    fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts))
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3935
3936
3936
3937
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        # One header line per requested revision ...
        ui.write(b'%s\n' % ctx2str(ctx))
        # ... followed by one line per successors set (possibly none, for
        # pruned changesets).
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3991
3992
3992
3993
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # Look the .hgtags filenode up without computing missing entries, so
        # we report the cache contents as-is.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4011
4012
4012
4013
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository (the command is optionalrepo).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.  'ui' is
    # reserved and rejected as a keyword name.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the tree again after alias expansion if
        # expansion changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4076
4077
4077
4078
4078 @command(
4079 @command(
4079 b'debuguigetpass',
4080 b'debuguigetpass',
4080 [
4081 [
4081 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4082 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4082 ],
4083 ],
4083 _(b'[-p TEXT]'),
4084 _(b'[-p TEXT]'),
4084 norepo=True,
4085 norepo=True,
4085 )
4086 )
4086 def debuguigetpass(ui, prompt=b''):
4087 def debuguigetpass(ui, prompt=b''):
4087 """show prompt to type password"""
4088 """show prompt to type password"""
4088 r = ui.getpass(prompt)
4089 r = ui.getpass(prompt)
4089 if r is None:
4090 if r is None:
4090 r = b"<default response>"
4091 r = b"<default response>"
4091 ui.writenoi18n(b'response: %s\n' % r)
4092 ui.writenoi18n(b'response: %s\n' % r)
4092
4093
4093
4094
4094 @command(
4095 @command(
4095 b'debuguiprompt',
4096 b'debuguiprompt',
4096 [
4097 [
4097 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4098 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4098 ],
4099 ],
4099 _(b'[-p TEXT]'),
4100 _(b'[-p TEXT]'),
4100 norepo=True,
4101 norepo=True,
4101 )
4102 )
4102 def debuguiprompt(ui, prompt=b''):
4103 def debuguiprompt(ui, prompt=b''):
4103 """show plain prompt"""
4104 """show plain prompt"""
4104 r = ui.prompt(prompt)
4105 r = ui.prompt(prompt)
4105 ui.writenoi18n(b'response: %s\n' % r)
4106 ui.writenoi18n(b'response: %s\n' % r)
4106
4107
4107
4108
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy and store locks so cache warming sees a
    # stable repository; CACHES_ALL requests every known cache.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4113
4114
4114
4115
4115 @command(
4116 @command(
4116 b'debugupgraderepo',
4117 b'debugupgraderepo',
4117 [
4118 [
4118 (
4119 (
4119 b'o',
4120 b'o',
4120 b'optimize',
4121 b'optimize',
4121 [],
4122 [],
4122 _(b'extra optimization to perform'),
4123 _(b'extra optimization to perform'),
4123 _(b'NAME'),
4124 _(b'NAME'),
4124 ),
4125 ),
4125 (b'', b'run', False, _(b'performs an upgrade')),
4126 (b'', b'run', False, _(b'performs an upgrade')),
4126 (b'', b'backup', True, _(b'keep the old repository content around')),
4127 (b'', b'backup', True, _(b'keep the old repository content around')),
4127 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4128 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4128 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4129 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4129 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4130 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4130 ],
4131 ],
4131 )
4132 )
4132 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4133 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4133 """upgrade a repository to use different features
4134 """upgrade a repository to use different features
4134
4135
4135 If no arguments are specified, the repository is evaluated for upgrade
4136 If no arguments are specified, the repository is evaluated for upgrade
4136 and a list of problems and potential optimizations is printed.
4137 and a list of problems and potential optimizations is printed.
4137
4138
4138 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4139 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4139 can be influenced via additional arguments. More details will be provided
4140 can be influenced via additional arguments. More details will be provided
4140 by the command output when run without ``--run``.
4141 by the command output when run without ``--run``.
4141
4142
4142 During the upgrade, the repository will be locked and no writes will be
4143 During the upgrade, the repository will be locked and no writes will be
4143 allowed.
4144 allowed.
4144
4145
4145 At the end of the upgrade, the repository may not be readable while new
4146 At the end of the upgrade, the repository may not be readable while new
4146 repository data is swapped in. This window will be as long as it takes to
4147 repository data is swapped in. This window will be as long as it takes to
4147 rename some directories inside the ``.hg`` directory. On most machines, this
4148 rename some directories inside the ``.hg`` directory. On most machines, this
4148 should complete almost instantaneously and the chances of a consumer being
4149 should complete almost instantaneously and the chances of a consumer being
4149 unable to access the repository should be low.
4150 unable to access the repository should be low.
4150
4151
4151 By default, all revlogs will be upgraded. You can restrict this using flags
4152 By default, all revlogs will be upgraded. You can restrict this using flags
4152 such as `--manifest`:
4153 such as `--manifest`:
4153
4154
4154 * `--manifest`: only optimize the manifest
4155 * `--manifest`: only optimize the manifest
4155 * `--no-manifest`: optimize all revlog but the manifest
4156 * `--no-manifest`: optimize all revlog but the manifest
4156 * `--changelog`: optimize the changelog only
4157 * `--changelog`: optimize the changelog only
4157 * `--no-changelog --no-manifest`: optimize filelogs only
4158 * `--no-changelog --no-manifest`: optimize filelogs only
4158 * `--filelogs`: optimize the filelogs only
4159 * `--filelogs`: optimize the filelogs only
4159 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4160 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4160 """
4161 """
4161 return upgrade.upgraderepo(
4162 return upgrade.upgraderepo(
4162 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4163 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4163 )
4164 )
4164
4165
4165
4166
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Rewrite path separators for display only when ui.slash is set and the
    # native separator isn't already '/'.  (Named function instead of the
    # previous `f = lambda` assignments; E731.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:

        def displaypath(fn):
            return fn

    # Column widths sized to the longest repo-relative and cwd-relative
    # paths.  Renamed loop variable from `abs`, which shadowed the builtin,
    # and replaced list comprehensions inside max() with generators.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in items),
        max(len(repo.pathto(p)) for p in items),
    )
    for p in items:
        line = fmt % (
            p,
            displaypath(repo.pathto(p)),
            b'exact' if m.exact(p) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4192
4193
4193
4194
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render "hex (phase)" for each divergent node, with a trailing
            # separator before the reason text.
            parts = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        line = b'%s: %s%s %s\n' % (
            entry[b'instability'],
            dnodes,
            entry[b'reason'],
            entry[b'node'],
        )
        ui.write(line)
4211
4212
4212
4213
4213 @command(
4214 @command(
4214 b'debugwireargs',
4215 b'debugwireargs',
4215 [
4216 [
4216 (b'', b'three', b'', b'three'),
4217 (b'', b'three', b'', b'three'),
4217 (b'', b'four', b'', b'four'),
4218 (b'', b'four', b'', b'four'),
4218 (b'', b'five', b'', b'five'),
4219 (b'', b'five', b'', b'five'),
4219 ]
4220 ]
4220 + cmdutil.remoteopts,
4221 + cmdutil.remoteopts,
4221 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4222 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4222 norepo=True,
4223 norepo=True,
4223 )
4224 )
4224 def debugwireargs(ui, repopath, *vals, **opts):
4225 def debugwireargs(ui, repopath, *vals, **opts):
4225 opts = pycompat.byteskwargs(opts)
4226 opts = pycompat.byteskwargs(opts)
4226 repo = hg.peer(ui, opts, repopath)
4227 repo = hg.peer(ui, opts, repopath)
4227 try:
4228 try:
4228 for opt in cmdutil.remoteopts:
4229 for opt in cmdutil.remoteopts:
4229 del opts[opt[1]]
4230 del opts[opt[1]]
4230 args = {}
4231 args = {}
4231 for k, v in opts.items():
4232 for k, v in opts.items():
4232 if v:
4233 if v:
4233 args[k] = v
4234 args[k] = v
4234 args = pycompat.strkwargs(args)
4235 args = pycompat.strkwargs(args)
4235 # run twice to check that we don't mess up the stream for the next command
4236 # run twice to check that we don't mess up the stream for the next command
4236 res1 = repo.debugwireargs(*vals, **args)
4237 res1 = repo.debugwireargs(*vals, **args)
4237 res2 = repo.debugwireargs(*vals, **args)
4238 res2 = repo.debugwireargs(*vals, **args)
4238 ui.write(b"%s\n" % res1)
4239 ui.write(b"%s\n" % res1)
4239 if res1 != res2:
4240 if res1 != res2:
4240 ui.warn(b"%s\n" % res2)
4241 ui.warn(b"%s\n" % res2)
4241 finally:
4242 finally:
4242 repo.close()
4243 repo.close()
4243
4244
4244
4245
4245 def _parsewirelangblocks(fh):
4246 def _parsewirelangblocks(fh):
4246 activeaction = None
4247 activeaction = None
4247 blocklines = []
4248 blocklines = []
4248 lastindent = 0
4249 lastindent = 0
4249
4250
4250 for line in fh:
4251 for line in fh:
4251 line = line.rstrip()
4252 line = line.rstrip()
4252 if not line:
4253 if not line:
4253 continue
4254 continue
4254
4255
4255 if line.startswith(b'#'):
4256 if line.startswith(b'#'):
4256 continue
4257 continue
4257
4258
4258 if not line.startswith(b' '):
4259 if not line.startswith(b' '):
4259 # New block. Flush previous one.
4260 # New block. Flush previous one.
4260 if activeaction:
4261 if activeaction:
4261 yield activeaction, blocklines
4262 yield activeaction, blocklines
4262
4263
4263 activeaction = line
4264 activeaction = line
4264 blocklines = []
4265 blocklines = []
4265 lastindent = 0
4266 lastindent = 0
4266 continue
4267 continue
4267
4268
4268 # Else we start with an indent.
4269 # Else we start with an indent.
4269
4270
4270 if not activeaction:
4271 if not activeaction:
4271 raise error.Abort(_(b'indented line outside of block'))
4272 raise error.Abort(_(b'indented line outside of block'))
4272
4273
4273 indent = len(line) - len(line.lstrip())
4274 indent = len(line) - len(line.lstrip())
4274
4275
4275 # If this line is indented more than the last line, concatenate it.
4276 # If this line is indented more than the last line, concatenate it.
4276 if indent > lastindent and blocklines:
4277 if indent > lastindent and blocklines:
4277 blocklines[-1] += line.lstrip()
4278 blocklines[-1] += line.lstrip()
4278 else:
4279 else:
4279 blocklines.append(line)
4280 blocklines.append(line)
4280 lastindent = indent
4281 lastindent = indent
4281
4282
4282 # Flush last block.
4283 # Flush last block.
4283 if activeaction:
4284 if activeaction:
4284 yield activeaction, blocklines
4285 yield activeaction, blocklines
4285
4286
4286
4287
4287 @command(
4288 @command(
4288 b'debugwireproto',
4289 b'debugwireproto',
4289 [
4290 [
4290 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4291 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4291 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4292 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4292 (
4293 (
4293 b'',
4294 b'',
4294 b'noreadstderr',
4295 b'noreadstderr',
4295 False,
4296 False,
4296 _(b'do not read from stderr of the remote'),
4297 _(b'do not read from stderr of the remote'),
4297 ),
4298 ),
4298 (
4299 (
4299 b'',
4300 b'',
4300 b'nologhandshake',
4301 b'nologhandshake',
4301 False,
4302 False,
4302 _(b'do not log I/O related to the peer handshake'),
4303 _(b'do not log I/O related to the peer handshake'),
4303 ),
4304 ),
4304 ]
4305 ]
4305 + cmdutil.remoteopts,
4306 + cmdutil.remoteopts,
4306 _(b'[PATH]'),
4307 _(b'[PATH]'),
4307 optionalrepo=True,
4308 optionalrepo=True,
4308 )
4309 )
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
        namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    # Native kwargs: opts keys are str (option *values* remain bytes), in
    # line with the ongoing "migrate `opts` to native kwargs" cleanup of
    # the debug commands in this module.

    if opts['localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts['peer'] and opts['peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts['localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts['peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer._make_peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts['peer']
            )
        else:
            peer_path = urlutil.try_path(ui, path)
            peer = httppeer._make_peer(ui, peer_path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns [b'BODYFILE', <path>]; the previous
                    # code passed the whole list to open(), raising
                    # TypeError. Extract the path component.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now