##// END OF EJS Templates
debugrebuilddirstate: double check that no transaction is open...
marmoute -
r50895:1f28172c default
parent child Browse files
Show More
@@ -1,4773 +1,4776 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import binascii
9 import binascii
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 import time
26 import time
27
27
28 from .i18n import _
28 from .i18n import _
29 from .node import (
29 from .node import (
30 bin,
30 bin,
31 hex,
31 hex,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .pycompat import (
35 from .pycompat import (
36 getattr,
36 getattr,
37 open,
37 open,
38 )
38 )
39 from . import (
39 from . import (
40 bundle2,
40 bundle2,
41 bundlerepo,
41 bundlerepo,
42 changegroup,
42 changegroup,
43 cmdutil,
43 cmdutil,
44 color,
44 color,
45 context,
45 context,
46 copies,
46 copies,
47 dagparser,
47 dagparser,
48 dirstateutils,
48 dirstateutils,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 requirements,
73 requirements,
74 revlog,
74 revlog,
75 revset,
75 revset,
76 revsetlang,
76 revsetlang,
77 scmutil,
77 scmutil,
78 setdiscovery,
78 setdiscovery,
79 simplemerge,
79 simplemerge,
80 sshpeer,
80 sshpeer,
81 sslutil,
81 sslutil,
82 streamclone,
82 streamclone,
83 strip,
83 strip,
84 tags as tagsmod,
84 tags as tagsmod,
85 templater,
85 templater,
86 treediscovery,
86 treediscovery,
87 upgrade,
87 upgrade,
88 url as urlmod,
88 url as urlmod,
89 util,
89 util,
90 verify,
90 verify,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 )
94 )
95 from .interfaces import repository
95 from .interfaces import repository
96 from .utils import (
96 from .utils import (
97 cborutil,
97 cborutil,
98 compression,
98 compression,
99 dateutil,
99 dateutil,
100 procutil,
100 procutil,
101 stringutil,
101 stringutil,
102 urlutil,
102 urlutil,
103 )
103 )
104
104
105 from .revlogutils import (
105 from .revlogutils import (
106 constants as revlog_constants,
106 constants as revlog_constants,
107 debug as revlog_debug,
107 debug as revlog_debug,
108 deltas as deltautil,
108 deltas as deltautil,
109 nodemap,
109 nodemap,
110 rewrite,
110 rewrite,
111 sidedata,
111 sidedata,
112 )
112 )
113
113
114 release = lockmod.release
114 release = lockmod.release
115
115
116 table = {}
116 table = {}
117 table.update(strip.command._table)
117 table.update(strip.command._table)
118 command = registrar.command(table)
118 command = registrar.command(table)
119
119
120
120
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given; open it directly, no
        # repository is required.
        indexfile, rev1, rev2 = args
        rlog = revlog.revlog(
            vfsmod.vfs(encoding.getcwd(), audit=False), indexfile
        )
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(anc), hex(anc)))
140
140
141
141
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the (harmless) EICAR test pattern into the repo's cache
    # directory; a resident AV scanner is expected to react to it.
    # NOTE(review): the path literal is a native str while vfs paths in this
    # codebase are normally bytes literals — confirm this is intended.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # If a scanner quarantined or deleted the file, this unlink will fail
    # loudly, which is exactly the signal this command exists to produce.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157
157
158
158
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path, parse its header, then replay it onto the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
165
165
166
166
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to run on a non-empty repo unless explicitly allowed: revision
    # ids produced below are only reproducible from an empty changelog.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass; only used to size the progress bar and, for
    # --mergeable-file, the initial file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # All commits happen under one transaction so an interrupted run does
    # not leave a partially-built DAG behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently created node
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # nodeids[rev] -> node, for resolving backrefs
        id = 0
        progress.update(id)
        # second parse pass: actually build the commits
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file from
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # touch this rev's dedicated line so every rev changes
                    # the file without conflicting with its neighbors
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry the second parent's nf* files through the
                        # merge so they are not dropped from the manifest
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: supply content for our synthetic files
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve parent backrefs (negative/absent -> null parent)
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # ":tag" element: remember a local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # "@branch" element: switch branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
350
350
351
351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup `gen` to the ui

    With `all`, every delta of every section (changelog, manifest, each
    filelog) is printed in full; otherwise only the changelog node hashes
    are listed. `indent` prefixes each output line, so this can be nested
    under bundle2 part output.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section header followed by one line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections must be consumed in stream order:
        # changelog, manifest, then one group per filelog
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only the changelog node hashes
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391
391
392
392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown encoding version: report it instead of crashing, so the
        # rest of the bundle can still be inspected
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # render markers through the standard formatter so templating
        # options from the caller still apply
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
415
415
416
416
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    decoded = phases.binarydecode(data)
    # one line per head, grouped by phase in canonical phase order
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for node in decoded[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(node), phasename))
425
425
426
426
def _quasirepr(thing):
    """Return a byte-string repr with deterministic (sorted) dict ordering."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433
433
434
434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part types, decode and pretty-print the payload
        # (indented under the part header) unless --quiet was given
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457
457
458
458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only reports the bundlespec; nothing else is dumped
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            # bundle2 has its own structured dumper
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481
481
482
482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # plain wire-protocol capabilities first...
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        # ...then the decoded bundle2 capability map, if advertised
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capname, capvalues in sorted(b2caps.items()):
                ui.write(b'  %s\n' % capname)
                for value in capvalues:
                    ui.write(b'    %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
502
502
503
503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the revision contents instead of trusting storage
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the precomputed file-change block from changelog sidedata;
        # `files` stays None when the revision has no such block stored
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; the categories are mutually
            # exclusive, with b"touched" as the fallback
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # copy information: which parent the copy came from, and the
            # source path (both empty when the file was not copied)
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553
553
554
554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Delegate to the verifier; a truthy result means inconsistencies were
    # found (the verifier reports details as it runs).
    problems = verify.verifier(repo)._verify_dirstate()
    if problems:
        raise error.Abort(
            _(b"dirstate inconsistent with current parent's manifest")
        )
562
562
563
563
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
576
576
577
577
def _debugdisplaycolor(ui):
    """print every available color/effect name, rendered in itself"""
    # work on a copy so the caller's ui style table is left untouched
    ui = ui.copy()
    ui._styles.clear()
    # map each active effect to itself so it is rendered in its own effect
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-defined color./terminfo. entries
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
594
594
595
595
def _debugdisplaystyle(ui):
    """list configured style labels and render each of their effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect columns line up
    longest = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(name))))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(rendered)
        ui.write(b'\n')
609
609
610
610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy store files verbatim, so secret changesets
        # cannot be filtered out; warn instead of silently leaking them.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
632
632
633
633
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revision
        # numbers the user listed as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: walk the repository changelog, optionally emitting
        # branch annotations ('a' events) and tag labels ('l' events).
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
703
703
704
704
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the storage is implied, so the single positional
        # argument is actually the revision, not a file.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: bytes as stored, without flag processing.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
720
720
721
721
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is a (unixtime, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
740
740
741
741
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
        - base: a full snapshot
        - snap: an intermediate snapshot
        - p1: a delta against the first parent
        - p2: a delta against the second parent
        - skip1: a delta against the same base as p1
          (when p1 has empty delta)
        - skip2: a delta against the same base as p2
          (when p2 has empty delta)
        - prev: a delta against the previous revision
        - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics; returns
        # (p1, p2, compsize, uncompsize, deltatype, chain, chainsize).
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a revision is either a full snapshot or a
            # delta against the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: how many disk hunks, and how big,
            # would be needed to reconstruct this chain.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
982
982
983
983
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # With a single positional argument it is the revision of the implied
    # storage (-c/-m); with two, the first names the file.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # NOTE(review): the command name passed here is b'debugdeltachain' —
    # presumably a copy/paste leftover that only affects error messages;
    # confirm before changing.
    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = revlog.parentrevs(rev)

    # Translate --source into the revision whose delta feeds the search.
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = revlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1041
1041
1042
1042
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still honored; it overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of the octal permissions
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1130
1130
1131
1131
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The ignore-pattern hash is stored at the tail of the tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1146
1146
1147
1147
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to the actual peer designated by `remoteurl`
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository that hides everything outside `remote_revs`
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: likewise restrict the local side to `local_revs`
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information from the discovery run plus the
    # statistics computed below; it is what ends up in the formatter output.
    data = {}
    if opts.get(b'old'):
        # old-style discovery, via the `treediscovery` module

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to the heads of its ancestry
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # current set-based discovery, via the `setdiscovery` module

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # for machine readable formatters, capture what the discovery wrote
        # to the ui and expose it as an "output" field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # a lone nullid means nothing is actually in common
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every local revision is either common or missing
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    # NOTE(review): the `total-*` keys are assumed to be filled in by the
    # discovery audit machinery above — they are not set in this function.
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1429
1429
1430
1430
# chunk size (4 KiB) used by debugdownload when streaming data from the
# response handle to the destination
_chunksize = 4 << 10
1432
1432
1433
1433
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched through Mercurial's URL handling
    (honoring proxy/auth configuration) and streamed in ``_chunksize``
    pieces either to the ``--output`` file or to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the response handle too, not just the output file, so we do
        # not leak the underlying file descriptor / connection
        fh.close()
        if output:
            dest.close()
1456
1456
1457
1457
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hg_version = util.version()
    fm = ui.formatter(b'debugextensions', opts)

    loaded = extensions.extensions(ui)
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)

        # locate where the extension was loaded from, when we can tell
        source_path = None
        if util.safehasattr(module, '__file__'):
            source_path = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source_path = pycompat.sysexecutable

        if internal:
            tested_versions = []  # never expose magic string to users
        else:
            tested_versions = getattr(module, 'testedwith', b'').split()
        bug_url = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # annotate the name with compatibility information
            if internal or hg_version in tested_versions:
                fm.plain(b'\n')
            elif not tested_versions:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % tested_versions[-1])

        fm.condwrite(
            ui.verbose and source_path,
            b'source',
            _(b' location: %s\n'),
            source_path or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and tested_versions,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested_versions, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and bug_url,
            b'buglink',
            _(b' bug reporting: %s\n'),
            bug_url or b"",
        )

    fm.end()
1519
1519
1520
1520
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the successive transformation stages applied to the parsed tree; each
    # can be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # decide which stages to print
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include the working directory (unknown and ignored files too)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print every candidate file the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1616
1616
1617
1617
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (write mode) conflicts with --from-report / --dry-run.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )

    # Only revlog-v1 repositories ever stored the corrupted metadata.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1690
1690
1691
1691
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the widest variant name, but never narrower than the header
    width = max(len(fv.name) for fv in upgrade.allformatvariant)
    width = max(len(b'format-variant'), width)

    def _padded(name):
        # pad the "name" column so the value columns line up
        return b'%s:' + (b' ' * (width - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def _render(value):
            # bytes pass through untouched, everything else becomes yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        _render = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (width - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick one label suffix shared by the name and repo columns
        if repovalue != configvalue:
            suffix = b'mismatchconfig'
        elif repovalue != fv.default:
            suffix = b'mismatchdefault'
        else:
            suffix = b'uptodate'
        namelabel = b'formatvariant.name.' + suffix
        repolabel = b'formatvariant.repo.' + suffix

        fm.write(b'name', _padded(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', _render(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            _render(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            _render(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1762
1762
1763
1763
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean exactly like the classic ``and/or`` idiom would
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway file inside the target directory; failure
        # (e.g. unwritable path) leaves the answer as "(unknown)"
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1786
1786
1787
1787
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name onto an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1834
1834
1835
1835
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: just dump the combined matcher
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise check whether any parent directory is ignored
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break

        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue

        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1884
1884
1885
1885
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)
    # some stores wrap the actual revlog in ``_revlog``; fall back to the
    # store itself when that attribute is absent
    revlog = getattr(store, b'_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1907
1907
1908
1908
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        # one edge per real parent; a null second parent is omitted
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1927
1927
1928
1928
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be loaded/exercised before asking for stats
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # only the C/Rust index implementations expose a stats() method
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1938
1938
1939
1939
1940 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1940 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1941 def debuginstall(ui, **opts):
1941 def debuginstall(ui, **opts):
1942 """test Mercurial installation
1942 """test Mercurial installation
1943
1943
1944 Returns 0 on success.
1944 Returns 0 on success.
1945 """
1945 """
1946 opts = pycompat.byteskwargs(opts)
1946 opts = pycompat.byteskwargs(opts)
1947
1947
1948 problems = 0
1948 problems = 0
1949
1949
1950 fm = ui.formatter(b'debuginstall', opts)
1950 fm = ui.formatter(b'debuginstall', opts)
1951 fm.startitem()
1951 fm.startitem()
1952
1952
1953 # encoding might be unknown or wrong. don't translate these messages.
1953 # encoding might be unknown or wrong. don't translate these messages.
1954 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1954 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1955 err = None
1955 err = None
1956 try:
1956 try:
1957 codecs.lookup(pycompat.sysstr(encoding.encoding))
1957 codecs.lookup(pycompat.sysstr(encoding.encoding))
1958 except LookupError as inst:
1958 except LookupError as inst:
1959 err = stringutil.forcebytestr(inst)
1959 err = stringutil.forcebytestr(inst)
1960 problems += 1
1960 problems += 1
1961 fm.condwrite(
1961 fm.condwrite(
1962 err,
1962 err,
1963 b'encodingerror',
1963 b'encodingerror',
1964 b" %s\n (check that your locale is properly set)\n",
1964 b" %s\n (check that your locale is properly set)\n",
1965 err,
1965 err,
1966 )
1966 )
1967
1967
1968 # Python
1968 # Python
1969 pythonlib = None
1969 pythonlib = None
1970 if util.safehasattr(os, '__file__'):
1970 if util.safehasattr(os, '__file__'):
1971 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1971 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1972 elif getattr(sys, 'oxidized', False):
1972 elif getattr(sys, 'oxidized', False):
1973 pythonlib = pycompat.sysexecutable
1973 pythonlib = pycompat.sysexecutable
1974
1974
1975 fm.write(
1975 fm.write(
1976 b'pythonexe',
1976 b'pythonexe',
1977 _(b"checking Python executable (%s)\n"),
1977 _(b"checking Python executable (%s)\n"),
1978 pycompat.sysexecutable or _(b"unknown"),
1978 pycompat.sysexecutable or _(b"unknown"),
1979 )
1979 )
1980 fm.write(
1980 fm.write(
1981 b'pythonimplementation',
1981 b'pythonimplementation',
1982 _(b"checking Python implementation (%s)\n"),
1982 _(b"checking Python implementation (%s)\n"),
1983 pycompat.sysbytes(platform.python_implementation()),
1983 pycompat.sysbytes(platform.python_implementation()),
1984 )
1984 )
1985 fm.write(
1985 fm.write(
1986 b'pythonver',
1986 b'pythonver',
1987 _(b"checking Python version (%s)\n"),
1987 _(b"checking Python version (%s)\n"),
1988 (b"%d.%d.%d" % sys.version_info[:3]),
1988 (b"%d.%d.%d" % sys.version_info[:3]),
1989 )
1989 )
1990 fm.write(
1990 fm.write(
1991 b'pythonlib',
1991 b'pythonlib',
1992 _(b"checking Python lib (%s)...\n"),
1992 _(b"checking Python lib (%s)...\n"),
1993 pythonlib or _(b"unknown"),
1993 pythonlib or _(b"unknown"),
1994 )
1994 )
1995
1995
1996 try:
1996 try:
1997 from . import rustext # pytype: disable=import-error
1997 from . import rustext # pytype: disable=import-error
1998
1998
1999 rustext.__doc__ # trigger lazy import
1999 rustext.__doc__ # trigger lazy import
2000 except ImportError:
2000 except ImportError:
2001 rustext = None
2001 rustext = None
2002
2002
2003 security = set(sslutil.supportedprotocols)
2003 security = set(sslutil.supportedprotocols)
2004 if sslutil.hassni:
2004 if sslutil.hassni:
2005 security.add(b'sni')
2005 security.add(b'sni')
2006
2006
2007 fm.write(
2007 fm.write(
2008 b'pythonsecurity',
2008 b'pythonsecurity',
2009 _(b"checking Python security support (%s)\n"),
2009 _(b"checking Python security support (%s)\n"),
2010 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2010 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2011 )
2011 )
2012
2012
2013 # These are warnings, not errors. So don't increment problem count. This
2013 # These are warnings, not errors. So don't increment problem count. This
2014 # may change in the future.
2014 # may change in the future.
2015 if b'tls1.2' not in security:
2015 if b'tls1.2' not in security:
2016 fm.plain(
2016 fm.plain(
2017 _(
2017 _(
2018 b' TLS 1.2 not supported by Python install; '
2018 b' TLS 1.2 not supported by Python install; '
2019 b'network connections lack modern security\n'
2019 b'network connections lack modern security\n'
2020 )
2020 )
2021 )
2021 )
2022 if b'sni' not in security:
2022 if b'sni' not in security:
2023 fm.plain(
2023 fm.plain(
2024 _(
2024 _(
2025 b' SNI not supported by Python install; may have '
2025 b' SNI not supported by Python install; may have '
2026 b'connectivity issues with some servers\n'
2026 b'connectivity issues with some servers\n'
2027 )
2027 )
2028 )
2028 )
2029
2029
2030 fm.plain(
2030 fm.plain(
2031 _(
2031 _(
2032 b"checking Rust extensions (%s)\n"
2032 b"checking Rust extensions (%s)\n"
2033 % (b'missing' if rustext is None else b'installed')
2033 % (b'missing' if rustext is None else b'installed')
2034 ),
2034 ),
2035 )
2035 )
2036
2036
2037 # TODO print CA cert info
2037 # TODO print CA cert info
2038
2038
2039 # hg version
2039 # hg version
2040 hgver = util.version()
2040 hgver = util.version()
2041 fm.write(
2041 fm.write(
2042 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2042 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2043 )
2043 )
2044 fm.write(
2044 fm.write(
2045 b'hgverextra',
2045 b'hgverextra',
2046 _(b"checking Mercurial custom build (%s)\n"),
2046 _(b"checking Mercurial custom build (%s)\n"),
2047 b'+'.join(hgver.split(b'+')[1:]),
2047 b'+'.join(hgver.split(b'+')[1:]),
2048 )
2048 )
2049
2049
2050 # compiled modules
2050 # compiled modules
2051 hgmodules = None
2051 hgmodules = None
2052 if util.safehasattr(sys.modules[__name__], '__file__'):
2052 if util.safehasattr(sys.modules[__name__], '__file__'):
2053 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2053 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2054 elif getattr(sys, 'oxidized', False):
2054 elif getattr(sys, 'oxidized', False):
2055 hgmodules = pycompat.sysexecutable
2055 hgmodules = pycompat.sysexecutable
2056
2056
2057 fm.write(
2057 fm.write(
2058 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2058 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2059 )
2059 )
2060 fm.write(
2060 fm.write(
2061 b'hgmodules',
2061 b'hgmodules',
2062 _(b"checking installed modules (%s)...\n"),
2062 _(b"checking installed modules (%s)...\n"),
2063 hgmodules or _(b"unknown"),
2063 hgmodules or _(b"unknown"),
2064 )
2064 )
2065
2065
2066 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2066 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2067 rustext = rustandc # for now, that's the only case
2067 rustext = rustandc # for now, that's the only case
2068 cext = policy.policy in (b'c', b'allow') or rustandc
2068 cext = policy.policy in (b'c', b'allow') or rustandc
2069 nopure = cext or rustext
2069 nopure = cext or rustext
2070 if nopure:
2070 if nopure:
2071 err = None
2071 err = None
2072 try:
2072 try:
2073 if cext:
2073 if cext:
2074 from .cext import ( # pytype: disable=import-error
2074 from .cext import ( # pytype: disable=import-error
2075 base85,
2075 base85,
2076 bdiff,
2076 bdiff,
2077 mpatch,
2077 mpatch,
2078 osutil,
2078 osutil,
2079 )
2079 )
2080
2080
2081 # quiet pyflakes
2081 # quiet pyflakes
2082 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2082 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2083 if rustext:
2083 if rustext:
2084 from .rustext import ( # pytype: disable=import-error
2084 from .rustext import ( # pytype: disable=import-error
2085 ancestor,
2085 ancestor,
2086 dirstate,
2086 dirstate,
2087 )
2087 )
2088
2088
2089 dir(ancestor), dir(dirstate) # quiet pyflakes
2089 dir(ancestor), dir(dirstate) # quiet pyflakes
2090 except Exception as inst:
2090 except Exception as inst:
2091 err = stringutil.forcebytestr(inst)
2091 err = stringutil.forcebytestr(inst)
2092 problems += 1
2092 problems += 1
2093 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2093 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2094
2094
2095 compengines = util.compengines._engines.values()
2095 compengines = util.compengines._engines.values()
2096 fm.write(
2096 fm.write(
2097 b'compengines',
2097 b'compengines',
2098 _(b'checking registered compression engines (%s)\n'),
2098 _(b'checking registered compression engines (%s)\n'),
2099 fm.formatlist(
2099 fm.formatlist(
2100 sorted(e.name() for e in compengines),
2100 sorted(e.name() for e in compengines),
2101 name=b'compengine',
2101 name=b'compengine',
2102 fmt=b'%s',
2102 fmt=b'%s',
2103 sep=b', ',
2103 sep=b', ',
2104 ),
2104 ),
2105 )
2105 )
2106 fm.write(
2106 fm.write(
2107 b'compenginesavail',
2107 b'compenginesavail',
2108 _(b'checking available compression engines (%s)\n'),
2108 _(b'checking available compression engines (%s)\n'),
2109 fm.formatlist(
2109 fm.formatlist(
2110 sorted(e.name() for e in compengines if e.available()),
2110 sorted(e.name() for e in compengines if e.available()),
2111 name=b'compengine',
2111 name=b'compengine',
2112 fmt=b'%s',
2112 fmt=b'%s',
2113 sep=b', ',
2113 sep=b', ',
2114 ),
2114 ),
2115 )
2115 )
2116 wirecompengines = compression.compengines.supportedwireengines(
2116 wirecompengines = compression.compengines.supportedwireengines(
2117 compression.SERVERROLE
2117 compression.SERVERROLE
2118 )
2118 )
2119 fm.write(
2119 fm.write(
2120 b'compenginesserver',
2120 b'compenginesserver',
2121 _(
2121 _(
2122 b'checking available compression engines '
2122 b'checking available compression engines '
2123 b'for wire protocol (%s)\n'
2123 b'for wire protocol (%s)\n'
2124 ),
2124 ),
2125 fm.formatlist(
2125 fm.formatlist(
2126 [e.name() for e in wirecompengines if e.wireprotosupport()],
2126 [e.name() for e in wirecompengines if e.wireprotosupport()],
2127 name=b'compengine',
2127 name=b'compengine',
2128 fmt=b'%s',
2128 fmt=b'%s',
2129 sep=b', ',
2129 sep=b', ',
2130 ),
2130 ),
2131 )
2131 )
2132 re2 = b'missing'
2132 re2 = b'missing'
2133 if util._re2:
2133 if util._re2:
2134 re2 = b'available'
2134 re2 = b'available'
2135 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2135 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2136 fm.data(re2=bool(util._re2))
2136 fm.data(re2=bool(util._re2))
2137
2137
2138 # templates
2138 # templates
2139 p = templater.templatedir()
2139 p = templater.templatedir()
2140 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2140 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2141 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2141 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2142 if p:
2142 if p:
2143 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2143 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2144 if m:
2144 if m:
2145 # template found, check if it is working
2145 # template found, check if it is working
2146 err = None
2146 err = None
2147 try:
2147 try:
2148 templater.templater.frommapfile(m)
2148 templater.templater.frommapfile(m)
2149 except Exception as inst:
2149 except Exception as inst:
2150 err = stringutil.forcebytestr(inst)
2150 err = stringutil.forcebytestr(inst)
2151 p = None
2151 p = None
2152 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2152 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2153 else:
2153 else:
2154 p = None
2154 p = None
2155 fm.condwrite(
2155 fm.condwrite(
2156 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2156 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2157 )
2157 )
2158 fm.condwrite(
2158 fm.condwrite(
2159 not m,
2159 not m,
2160 b'defaulttemplatenotfound',
2160 b'defaulttemplatenotfound',
2161 _(b" template '%s' not found\n"),
2161 _(b" template '%s' not found\n"),
2162 b"default",
2162 b"default",
2163 )
2163 )
2164 if not p:
2164 if not p:
2165 problems += 1
2165 problems += 1
2166 fm.condwrite(
2166 fm.condwrite(
2167 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2167 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2168 )
2168 )
2169
2169
2170 # editor
2170 # editor
2171 editor = ui.geteditor()
2171 editor = ui.geteditor()
2172 editor = util.expandpath(editor)
2172 editor = util.expandpath(editor)
2173 editorbin = procutil.shellsplit(editor)[0]
2173 editorbin = procutil.shellsplit(editor)[0]
2174 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2174 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2175 cmdpath = procutil.findexe(editorbin)
2175 cmdpath = procutil.findexe(editorbin)
2176 fm.condwrite(
2176 fm.condwrite(
2177 not cmdpath and editor == b'vi',
2177 not cmdpath and editor == b'vi',
2178 b'vinotfound',
2178 b'vinotfound',
2179 _(
2179 _(
2180 b" No commit editor set and can't find %s in PATH\n"
2180 b" No commit editor set and can't find %s in PATH\n"
2181 b" (specify a commit editor in your configuration"
2181 b" (specify a commit editor in your configuration"
2182 b" file)\n"
2182 b" file)\n"
2183 ),
2183 ),
2184 not cmdpath and editor == b'vi' and editorbin,
2184 not cmdpath and editor == b'vi' and editorbin,
2185 )
2185 )
2186 fm.condwrite(
2186 fm.condwrite(
2187 not cmdpath and editor != b'vi',
2187 not cmdpath and editor != b'vi',
2188 b'editornotfound',
2188 b'editornotfound',
2189 _(
2189 _(
2190 b" Can't find editor '%s' in PATH\n"
2190 b" Can't find editor '%s' in PATH\n"
2191 b" (specify a commit editor in your configuration"
2191 b" (specify a commit editor in your configuration"
2192 b" file)\n"
2192 b" file)\n"
2193 ),
2193 ),
2194 not cmdpath and editorbin,
2194 not cmdpath and editorbin,
2195 )
2195 )
2196 if not cmdpath and editor != b'vi':
2196 if not cmdpath and editor != b'vi':
2197 problems += 1
2197 problems += 1
2198
2198
2199 # check username
2199 # check username
2200 username = None
2200 username = None
2201 err = None
2201 err = None
2202 try:
2202 try:
2203 username = ui.username()
2203 username = ui.username()
2204 except error.Abort as e:
2204 except error.Abort as e:
2205 err = e.message
2205 err = e.message
2206 problems += 1
2206 problems += 1
2207
2207
2208 fm.condwrite(
2208 fm.condwrite(
2209 username, b'username', _(b"checking username (%s)\n"), username
2209 username, b'username', _(b"checking username (%s)\n"), username
2210 )
2210 )
2211 fm.condwrite(
2211 fm.condwrite(
2212 err,
2212 err,
2213 b'usernameerror',
2213 b'usernameerror',
2214 _(
2214 _(
2215 b"checking username...\n %s\n"
2215 b"checking username...\n %s\n"
2216 b" (specify a username in your configuration file)\n"
2216 b" (specify a username in your configuration file)\n"
2217 ),
2217 ),
2218 err,
2218 err,
2219 )
2219 )
2220
2220
2221 for name, mod in extensions.extensions():
2221 for name, mod in extensions.extensions():
2222 handler = getattr(mod, 'debuginstall', None)
2222 handler = getattr(mod, 'debuginstall', None)
2223 if handler is not None:
2223 if handler is not None:
2224 problems += handler(ui, fm)
2224 problems += handler(ui, fm)
2225
2225
2226 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2226 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2227 if not problems:
2227 if not problems:
2228 fm.data(problems=problems)
2228 fm.data(problems=problems)
2229 fm.condwrite(
2229 fm.condwrite(
2230 problems,
2230 problems,
2231 b'problems',
2231 b'problems',
2232 _(b"%d problems detected, please check your install!\n"),
2232 _(b"%d problems detected, please check your install!\n"),
2233 problems,
2233 problems,
2234 )
2234 )
2235 fm.end()
2235 fm.end()
2236
2236
2237 return problems
2237 return problems
2238
2238
2239
2239
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Talk to the target through the peer interface so this also works on
    # remote repositories.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    known = peer.known([bin(nodeid) for nodeid in ids])
    # One character per queried id: b"1" if known, b"0" otherwise.
    ui.write(b"%s\n" % b"".join([b"1" if flag else b"0" for flag in known]))
2253
2253
2254
2254
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Historical alias kept only so old shell-completion scripts keep
    # working; it delegates straight to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2259
2259
2260
2260
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forcibly delete the lock files without any staleness checking; this
    # is the DANGEROUS path advertised in the option help above.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # Non-blocking acquisition (wait=False): abort rather than
                # queue behind another process.
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold the lock(s) until the user
                    # answers the prompt.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive: hold the lock(s) until interrupted.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release whatever locks were successfully acquired, even
        # when aborting partway through acquisition.
        release(*locks)

    # No modifying option was given: report the current lock state.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the named lock is held by someone else, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We got the lock ourselves, so nobody else holds it; give it
            # right back.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are b'host:pid'; only show the host
                    # when the holder is on a different machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock file vanished between the failed acquisition and the
                # stat: treat it as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2383
2383
2384
2384
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def fulltextcache():
        # Not every revlog implementation carries a fulltext cache.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Mutating the cache requires the working-copy lock.
        with repo.wlock():
            fulltextcache().clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for hexnode in add:
                try:
                    manifest = manifestlog[store.lookup(hexnode)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # Reading the manifest stores its revision in the cache too.
                manifest.read()
            return

    # No mutating option given: report the cache contents.
    cache = fulltextcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return
    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # cache.peek leaves the LRU ordering untouched (unlike cache.get).
        entry = cache.peek(nodeid)
        entrysize = len(entry)
        # Each on-disk record also carries a 20-byte nodeid and 4-byte size.
        totalsize += entrysize + 24
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(entrysize))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2458
2458
2459
2459
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Peek at the raw v1/v2 records to report which format will be used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: the merged commits, then one block per file
        # (with its nested extras), then the file-less top-level extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged: 'local' and 'other', with their
    # user-supplied labels when available.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields emitted depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Regular content merge: local/ancestor/other versions.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path conflict (rename) record.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2567
2567
2568
2568
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every name from all namespaces except branches; branches are
    # handled separately below so that only open ones are listed.
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # With no arguments, complete against the empty prefix (i.e. everything).
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2591
2591
2592
2592
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the (unfiltered) changelog index.
        changelog = repo.unfiltered().changelog
        index = changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            # The native index can produce the binary form itself.
            binary = index.nodemap_data_all()
        else:
            binary = nodemap.persistent_data(index)
        ui.write(binary)
    elif opts['dump_disk']:
        # Emit the persisted nodemap bytes exactly as stored on disk.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Verify that the on-disk data matches the in-memory index.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        # Display the docket (metadata header) of the persisted nodemap.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654
2654
2655
2655
2656 @command(
2656 @command(
2657 b'debugobsolete',
2657 b'debugobsolete',
2658 [
2658 [
2659 (b'', b'flags', 0, _(b'markers flag')),
2659 (b'', b'flags', 0, _(b'markers flag')),
2660 (
2660 (
2661 b'',
2661 b'',
2662 b'record-parents',
2662 b'record-parents',
2663 False,
2663 False,
2664 _(b'record parent information for the precursor'),
2664 _(b'record parent information for the precursor'),
2665 ),
2665 ),
2666 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2666 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2667 (
2667 (
2668 b'',
2668 b'',
2669 b'exclusive',
2669 b'exclusive',
2670 False,
2670 False,
2671 _(b'restrict display to markers only relevant to REV'),
2671 _(b'restrict display to markers only relevant to REV'),
2672 ),
2672 ),
2673 (b'', b'index', False, _(b'display index of the marker')),
2673 (b'', b'index', False, _(b'display index of the marker')),
2674 (b'', b'delete', [], _(b'delete markers specified by indices')),
2674 (b'', b'delete', [], _(b'delete markers specified by indices')),
2675 ]
2675 ]
2676 + cmdutil.commitopts2
2676 + cmdutil.commitopts2
2677 + cmdutil.formatteropts,
2677 + cmdutil.formatteropts,
2678 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2678 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2679 )
2679 )
2680 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2680 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2681 """create arbitrary obsolete marker
2681 """create arbitrary obsolete marker
2682
2682
2683 With no arguments, displays the list of obsolescence markers."""
2683 With no arguments, displays the list of obsolescence markers."""
2684
2684
2685 opts = pycompat.byteskwargs(opts)
2685 opts = pycompat.byteskwargs(opts)
2686
2686
2687 def parsenodeid(s):
2687 def parsenodeid(s):
2688 try:
2688 try:
2689 # We do not use revsingle/revrange functions here to accept
2689 # We do not use revsingle/revrange functions here to accept
2690 # arbitrary node identifiers, possibly not present in the
2690 # arbitrary node identifiers, possibly not present in the
2691 # local repository.
2691 # local repository.
2692 n = bin(s)
2692 n = bin(s)
2693 if len(n) != repo.nodeconstants.nodelen:
2693 if len(n) != repo.nodeconstants.nodelen:
2694 raise ValueError
2694 raise ValueError
2695 return n
2695 return n
2696 except ValueError:
2696 except ValueError:
2697 raise error.InputError(
2697 raise error.InputError(
2698 b'changeset references must be full hexadecimal '
2698 b'changeset references must be full hexadecimal '
2699 b'node identifiers'
2699 b'node identifiers'
2700 )
2700 )
2701
2701
2702 if opts.get(b'delete'):
2702 if opts.get(b'delete'):
2703 indices = []
2703 indices = []
2704 for v in opts.get(b'delete'):
2704 for v in opts.get(b'delete'):
2705 try:
2705 try:
2706 indices.append(int(v))
2706 indices.append(int(v))
2707 except ValueError:
2707 except ValueError:
2708 raise error.InputError(
2708 raise error.InputError(
2709 _(b'invalid index value: %r') % v,
2709 _(b'invalid index value: %r') % v,
2710 hint=_(b'use integers for indices'),
2710 hint=_(b'use integers for indices'),
2711 )
2711 )
2712
2712
2713 if repo.currenttransaction():
2713 if repo.currenttransaction():
2714 raise error.Abort(
2714 raise error.Abort(
2715 _(b'cannot delete obsmarkers in the middle of transaction.')
2715 _(b'cannot delete obsmarkers in the middle of transaction.')
2716 )
2716 )
2717
2717
2718 with repo.lock():
2718 with repo.lock():
2719 n = repair.deleteobsmarkers(repo.obsstore, indices)
2719 n = repair.deleteobsmarkers(repo.obsstore, indices)
2720 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2720 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2721
2721
2722 return
2722 return
2723
2723
2724 if precursor is not None:
2724 if precursor is not None:
2725 if opts[b'rev']:
2725 if opts[b'rev']:
2726 raise error.InputError(
2726 raise error.InputError(
2727 b'cannot select revision when creating marker'
2727 b'cannot select revision when creating marker'
2728 )
2728 )
2729 metadata = {}
2729 metadata = {}
2730 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2730 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2731 succs = tuple(parsenodeid(succ) for succ in successors)
2731 succs = tuple(parsenodeid(succ) for succ in successors)
2732 l = repo.lock()
2732 l = repo.lock()
2733 try:
2733 try:
2734 tr = repo.transaction(b'debugobsolete')
2734 tr = repo.transaction(b'debugobsolete')
2735 try:
2735 try:
2736 date = opts.get(b'date')
2736 date = opts.get(b'date')
2737 if date:
2737 if date:
2738 date = dateutil.parsedate(date)
2738 date = dateutil.parsedate(date)
2739 else:
2739 else:
2740 date = None
2740 date = None
2741 prec = parsenodeid(precursor)
2741 prec = parsenodeid(precursor)
2742 parents = None
2742 parents = None
2743 if opts[b'record_parents']:
2743 if opts[b'record_parents']:
2744 if prec not in repo.unfiltered():
2744 if prec not in repo.unfiltered():
2745 raise error.Abort(
2745 raise error.Abort(
2746 b'cannot used --record-parents on '
2746 b'cannot used --record-parents on '
2747 b'unknown changesets'
2747 b'unknown changesets'
2748 )
2748 )
2749 parents = repo.unfiltered()[prec].parents()
2749 parents = repo.unfiltered()[prec].parents()
2750 parents = tuple(p.node() for p in parents)
2750 parents = tuple(p.node() for p in parents)
2751 repo.obsstore.create(
2751 repo.obsstore.create(
2752 tr,
2752 tr,
2753 prec,
2753 prec,
2754 succs,
2754 succs,
2755 opts[b'flags'],
2755 opts[b'flags'],
2756 parents=parents,
2756 parents=parents,
2757 date=date,
2757 date=date,
2758 metadata=metadata,
2758 metadata=metadata,
2759 ui=ui,
2759 ui=ui,
2760 )
2760 )
2761 tr.close()
2761 tr.close()
2762 except ValueError as exc:
2762 except ValueError as exc:
2763 raise error.Abort(
2763 raise error.Abort(
2764 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2764 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2765 )
2765 )
2766 finally:
2766 finally:
2767 tr.release()
2767 tr.release()
2768 finally:
2768 finally:
2769 l.release()
2769 l.release()
2770 else:
2770 else:
2771 if opts[b'rev']:
2771 if opts[b'rev']:
2772 revs = logcmdutil.revrange(repo, opts[b'rev'])
2772 revs = logcmdutil.revrange(repo, opts[b'rev'])
2773 nodes = [repo[r].node() for r in revs]
2773 nodes = [repo[r].node() for r in revs]
2774 markers = list(
2774 markers = list(
2775 obsutil.getmarkers(
2775 obsutil.getmarkers(
2776 repo, nodes=nodes, exclusive=opts[b'exclusive']
2776 repo, nodes=nodes, exclusive=opts[b'exclusive']
2777 )
2777 )
2778 )
2778 )
2779 markers.sort(key=lambda x: x._data)
2779 markers.sort(key=lambda x: x._data)
2780 else:
2780 else:
2781 markers = obsutil.getmarkers(repo)
2781 markers = obsutil.getmarkers(repo)
2782
2782
2783 markerstoiter = markers
2783 markerstoiter = markers
2784 isrelevant = lambda m: True
2784 isrelevant = lambda m: True
2785 if opts.get(b'rev') and opts.get(b'index'):
2785 if opts.get(b'rev') and opts.get(b'index'):
2786 markerstoiter = obsutil.getmarkers(repo)
2786 markerstoiter = obsutil.getmarkers(repo)
2787 markerset = set(markers)
2787 markerset = set(markers)
2788 isrelevant = lambda m: m in markerset
2788 isrelevant = lambda m: m in markerset
2789
2789
2790 fm = ui.formatter(b'debugobsolete', opts)
2790 fm = ui.formatter(b'debugobsolete', opts)
2791 for i, m in enumerate(markerstoiter):
2791 for i, m in enumerate(markerstoiter):
2792 if not isrelevant(m):
2792 if not isrelevant(m):
2793 # marker can be irrelevant when we're iterating over a set
2793 # marker can be irrelevant when we're iterating over a set
2794 # of markers (markerstoiter) which is bigger than the set
2794 # of markers (markerstoiter) which is bigger than the set
2795 # of markers we want to display (markers)
2795 # of markers we want to display (markers)
2796 # this can happen if both --index and --rev options are
2796 # this can happen if both --index and --rev options are
2797 # provided and thus we need to iterate over all of the markers
2797 # provided and thus we need to iterate over all of the markers
2798 # to get the correct indices, but only display the ones that
2798 # to get the correct indices, but only display the ones that
2799 # are relevant to --rev value
2799 # are relevant to --rev value
2800 continue
2800 continue
2801 fm.startitem()
2801 fm.startitem()
2802 ind = i if opts.get(b'index') else None
2802 ind = i if opts.get(b'index') else None
2803 cmdutil.showmarker(fm, m, index=ind)
2803 cmdutil.showmarker(fm, m, index=ind)
2804 fm.end()
2804 fm.end()
2805
2805
2806
2806
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # default=None: with no --rev this resolves to the working context.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2819
2819
2820
2820
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    # default=None: with no --rev this resolves to the working context.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2833
2833
2834
2834
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Reject paths that escape the repository root.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate OS separators if needed.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the set.add methods once; this loop runs per dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead of the full path.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No state filter selected means accept all of n/m/a/r.
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903
2903
2904
2904
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # Match patterns against the first revision's context.
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2918
2918
2919
2919
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if probing failed.
        peer.close()
2943
2943
2944
2944
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence tool-selection chatter unless --debug is in effect.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029
3029
3030
3030
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Shell convention: exit 0 on success, non-zero on failure.
            return not r
        else:
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3066
3066
3067
3067
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs of two revisions (debug aid)"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the comparisons above holds, `rel` is
    # unbound and the final write raises NameError — presumably the pvec
    # relations are exhaustive; confirm against the pvec module.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094
3094
3095
3095
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding while a transaction is open would race with the
        # transaction's own dirstate handling; refuse loudly.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
        dirstate.write(repo.currenttransaction())
3144
3147
3145
3148
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3165
3163
3166
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or falsy.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3183
3186
3184
3187
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3190
3193
3191
3194
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        revlog_debug.dump(ui, r)
    else:
        revlog_debug.debug_revlog(ui, r)
    return 0
3208
3211
3209
3212
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one node id to size the header columns, then stop.
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index entry is broken.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3323
3326
3324
3327
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The transformation pipeline: each stage takes the tree produced by the
    # previous stage.  Order matters; the names double as user-visible stage
    # names for -p/--show-stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage entirely
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: 'showalways' stages are printed unconditionally,
    # 'showchanged' stages only when their tree differs from the last printed
    # one (avoids repeating identical trees in --verbose mode).
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree so that
    # --verify-optimized can re-evaluate the 'analyzed' stage later.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                # omit the header for the bare --verbose 'parsed' output to
                # preserve the historical output format
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the unoptimized ('analyzed') and optimized trees and
        # diff the resulting revision lists; any difference is an optimizer
        # bug and is reported as a unified-style +/- listing.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3456
3459
3457
3460
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio mode is implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # The two logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    fd_opt = opts[b'logiofd']
    if fd_opt:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(fd_opt), 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(fd_opt), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3506
3509
3507
3510
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision, making the result a single-parent
    # working directory.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Take the working-copy lock: only the dirstate parents are rewritten.
    with repo.wlock():
        repo.setparents(node1, node2)
3535
3538
3536
3539
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the only positional argument is the revision itself,
    # so shift `file_` into `rev`.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # fix copy-pasted command name so usage errors reference
            # debugsidedata rather than debugdata
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap filelog-style objects down to the underlying revlog
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries sorted by key; --verbose additionally dumps the raw
        # value of each entry
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3563
3566
3564
3567
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an explicit context instead.  Verification is deliberately
    # disabled: we only need the peer's certificate bytes in order to check
    # (and possibly repair) the chain, not a trusted connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True: the DER bytes are what the win32 helper expects
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only trigger the (slow) Windows
        # Update fetch when intermediates are actually missing.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3634
3637
3635
3638
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Strip backups live in .hg/strip-backup/*.hg; list them newest-first so
    # the most recently stripped changesets are found (and shown) first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These keys are expected by the incoming/getremotechanges machinery
    # reused below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log options --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            # open each bundle file as a peer so it can be inspected like a
            # remote repository
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            # a bundle whose base is no longer in the repo cannot be opened;
            # warn and move on to the next backup
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the chatter from getremotechanges while probing the bundle
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted changeset,
                # inside a lock + transaction, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            # legacy changegroup (bundle1) path
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the bundle path (--verbose) or a one-line summary of
                # each changeset it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # always remove the temporary bundle repo created by
            # getremotechanges
            cleanupfn()
3775
3778
3776
3779
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source and pinned revision) of the
    # given changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3788
3791
3789
3792
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names exposed in the interpreter's local namespace. ``repo`` may be
    # None because the command is declared optionalrepo.
    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if command:
        # Non-interactive mode: compile and execute the supplied program
        # instead of starting a REPL.
        compiled = code.compile_command(encoding.strfromlocal(command))
        code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
        return

    code.interact(local=imported_objects)
3839
3842
3840
3843
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    # Which revlog families were requested; None means "not specified".
    selected = [opts[b"changelog"], opts[b"manifest"], opts[b"filelogs"]]
    if all(flag is None for flag in selected):
        # No explicit selection: report on everything.
        selected = [True, True, True]
    changelog, manifest, filelogs = selected

    # Statistics must cover hidden revisions too, hence the unfiltered view.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3866
3869
3867
3870
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so intermediate results computed
    # for one revision are reused for the next.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3922
3925
3923
3926
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only report what is already cached.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            display = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        elif tagsnode is None:
            # No cache entry at all for this revision.
            display = b'missing'
        else:
            # Cached, but with an unusable (falsy, non-None) value.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
3942
3945
3943
3946
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions as extra template properties. The
    # name 'ui' is reserved for the template engine itself.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree and, if alias expansion changed it, the
        # expanded tree as well.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4007
4010
4008
4011
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. no interactive input available).
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4023
4026
4024
4027
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() produced, for testing prompt behavior.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4037
4040
4038
4041
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock so every cache
    # category can be rewritten safely.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4044
4047
4045
4048
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the actual work lives in the upgrade module; deduplicate the
    # requested optimization names before delegating.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4095
4098
4096
4099
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display paths with forward slashes when ui.slash is set on a platform
    # whose native separator differs; otherwise show them unchanged.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        displaypath = lambda fn: fn
    # Size the columns to the longest repo-relative and cwd-relative paths.
    # NOTE: renamed the loop variable from ``abs`` (which shadowed the
    # builtin) to ``abspath``.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items),
    )
    for abspath in items:
        line = fmt % (
            abspath,
            displaypath(repo.pathto(abspath)),
            b'exact' if m.exact(abspath) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4123
4126
4124
4127
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # When divergent nodes are involved, list them (with phases) before
        # the reason; otherwise that field is left empty.
        dnodes = b''
        if entry.get(b'divergentnodes'):
            formatted = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                for ctx in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(formatted) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4142
4145
4143
4146
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the test arguments remain.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4174
4177
4175
4178
4176 def _parsewirelangblocks(fh):
4179 def _parsewirelangblocks(fh):
4177 activeaction = None
4180 activeaction = None
4178 blocklines = []
4181 blocklines = []
4179 lastindent = 0
4182 lastindent = 0
4180
4183
4181 for line in fh:
4184 for line in fh:
4182 line = line.rstrip()
4185 line = line.rstrip()
4183 if not line:
4186 if not line:
4184 continue
4187 continue
4185
4188
4186 if line.startswith(b'#'):
4189 if line.startswith(b'#'):
4187 continue
4190 continue
4188
4191
4189 if not line.startswith(b' '):
4192 if not line.startswith(b' '):
4190 # New block. Flush previous one.
4193 # New block. Flush previous one.
4191 if activeaction:
4194 if activeaction:
4192 yield activeaction, blocklines
4195 yield activeaction, blocklines
4193
4196
4194 activeaction = line
4197 activeaction = line
4195 blocklines = []
4198 blocklines = []
4196 lastindent = 0
4199 lastindent = 0
4197 continue
4200 continue
4198
4201
4199 # Else we start with an indent.
4202 # Else we start with an indent.
4200
4203
4201 if not activeaction:
4204 if not activeaction:
4202 raise error.Abort(_(b'indented line outside of block'))
4205 raise error.Abort(_(b'indented line outside of block'))
4203
4206
4204 indent = len(line) - len(line.lstrip())
4207 indent = len(line) - len(line.lstrip())
4205
4208
4206 # If this line is indented more than the last line, concatenate it.
4209 # If this line is indented more than the last line, concatenate it.
4207 if indent > lastindent and blocklines:
4210 if indent > lastindent and blocklines:
4208 blocklines[-1] += line.lstrip()
4211 blocklines[-1] += line.lstrip()
4209 else:
4212 else:
4210 blocklines.append(line)
4213 blocklines.append(line)
4211 lastindent = indent
4214 lastindent = indent
4212
4215
4213 # Flush last block.
4216 # Flush last block.
4214 if activeaction:
4217 if activeaction:
4215 yield activeaction, blocklines
4218 yield activeaction, blocklines
4216
4219
4217
4220
4218 @command(
4221 @command(
4219 b'debugwireproto',
4222 b'debugwireproto',
4220 [
4223 [
4221 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4224 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4222 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4225 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4223 (
4226 (
4224 b'',
4227 b'',
4225 b'noreadstderr',
4228 b'noreadstderr',
4226 False,
4229 False,
4227 _(b'do not read from stderr of the remote'),
4230 _(b'do not read from stderr of the remote'),
4228 ),
4231 ),
4229 (
4232 (
4230 b'',
4233 b'',
4231 b'nologhandshake',
4234 b'nologhandshake',
4232 False,
4235 False,
4233 _(b'do not log I/O related to the peer handshake'),
4236 _(b'do not log I/O related to the peer handshake'),
4234 ),
4237 ),
4235 ]
4238 ]
4236 + cmdutil.remoteopts,
4239 + cmdutil.remoteopts,
4237 _(b'[PATH]'),
4240 _(b'[PATH]'),
4238 optionalrepo=True,
4241 optionalrepo=True,
4239 )
4242 )
4240 def debugwireproto(ui, repo, path=None, **opts):
4243 def debugwireproto(ui, repo, path=None, **opts):
4241 """send wire protocol commands to a server
4244 """send wire protocol commands to a server
4242
4245
4243 This command can be used to issue wire protocol commands to remote
4246 This command can be used to issue wire protocol commands to remote
4244 peers and to debug the raw data being exchanged.
4247 peers and to debug the raw data being exchanged.
4245
4248
4246 ``--localssh`` will start an SSH server against the current repository
4249 ``--localssh`` will start an SSH server against the current repository
4247 and connect to that. By default, the connection will perform a handshake
4250 and connect to that. By default, the connection will perform a handshake
4248 and establish an appropriate peer instance.
4251 and establish an appropriate peer instance.
4249
4252
4250 ``--peer`` can be used to bypass the handshake protocol and construct a
4253 ``--peer`` can be used to bypass the handshake protocol and construct a
4251 peer instance using the specified class type. Valid values are ``raw``,
4254 peer instance using the specified class type. Valid values are ``raw``,
4252 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4255 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4253 don't support higher-level command actions.
4256 don't support higher-level command actions.
4254
4257
4255 ``--noreadstderr`` can be used to disable automatic reading from stderr
4258 ``--noreadstderr`` can be used to disable automatic reading from stderr
4256 of the peer (for SSH connections only). Disabling automatic reading of
4259 of the peer (for SSH connections only). Disabling automatic reading of
4257 stderr is useful for making output more deterministic.
4260 stderr is useful for making output more deterministic.
4258
4261
4259 Commands are issued via a mini language which is specified via stdin.
4262 Commands are issued via a mini language which is specified via stdin.
4260 The language consists of individual actions to perform. An action is
4263 The language consists of individual actions to perform. An action is
4261 defined by a block. A block is defined as a line with no leading
4264 defined by a block. A block is defined as a line with no leading
4262 space followed by 0 or more lines with leading space. Blocks are
4265 space followed by 0 or more lines with leading space. Blocks are
4263 effectively a high-level command with additional metadata.
4266 effectively a high-level command with additional metadata.
4264
4267
4265 Lines beginning with ``#`` are ignored.
4268 Lines beginning with ``#`` are ignored.
4266
4269
4267 The following sections denote available actions.
4270 The following sections denote available actions.
4268
4271
4269 raw
4272 raw
4270 ---
4273 ---
4271
4274
4272 Send raw data to the server.
4275 Send raw data to the server.
4273
4276
4274 The block payload contains the raw data to send as one atomic send
4277 The block payload contains the raw data to send as one atomic send
4275 operation. The data may not actually be delivered in a single system
4278 operation. The data may not actually be delivered in a single system
4276 call: it depends on the abilities of the transport being used.
4279 call: it depends on the abilities of the transport being used.
4277
4280
4278 Each line in the block is de-indented and concatenated. Then, that
4281 Each line in the block is de-indented and concatenated. Then, that
4279 value is evaluated as a Python b'' literal. This allows the use of
4282 value is evaluated as a Python b'' literal. This allows the use of
4280 backslash escaping, etc.
4283 backslash escaping, etc.
4281
4284
4282 raw+
4285 raw+
4283 ----
4286 ----
4284
4287
4285 Behaves like ``raw`` except flushes output afterwards.
4288 Behaves like ``raw`` except flushes output afterwards.
4286
4289
4287 command <X>
4290 command <X>
4288 -----------
4291 -----------
4289
4292
4290 Send a request to run a named command, whose name follows the ``command``
4293 Send a request to run a named command, whose name follows the ``command``
4291 string.
4294 string.
4292
4295
4293 Arguments to the command are defined as lines in this block. The format of
4296 Arguments to the command are defined as lines in this block. The format of
4294 each line is ``<key> <value>``. e.g.::
4297 each line is ``<key> <value>``. e.g.::
4295
4298
4296 command listkeys
4299 command listkeys
4297 namespace bookmarks
4300 namespace bookmarks
4298
4301
4299 If the value begins with ``eval:``, it will be interpreted as a Python
4302 If the value begins with ``eval:``, it will be interpreted as a Python
4300 literal expression. Otherwise values are interpreted as Python b'' literals.
4303 literal expression. Otherwise values are interpreted as Python b'' literals.
4301 This allows sending complex types and encoding special byte sequences via
4304 This allows sending complex types and encoding special byte sequences via
4302 backslash escaping.
4305 backslash escaping.
4303
4306
4304 The following arguments have special meaning:
4307 The following arguments have special meaning:
4305
4308
4306 ``PUSHFILE``
4309 ``PUSHFILE``
4307 When defined, the *push* mechanism of the peer will be used instead
4310 When defined, the *push* mechanism of the peer will be used instead
4308 of the static request-response mechanism and the content of the
4311 of the static request-response mechanism and the content of the
4309 file specified in the value of this argument will be sent as the
4312 file specified in the value of this argument will be sent as the
4310 command payload.
4313 command payload.
4311
4314
4312 This can be used to submit a local bundle file to the remote.
4315 This can be used to submit a local bundle file to the remote.
4313
4316
4314 batchbegin
4317 batchbegin
4315 ----------
4318 ----------
4316
4319
4317 Instruct the peer to begin a batched send.
4320 Instruct the peer to begin a batched send.
4318
4321
4319 All ``command`` blocks are queued for execution until the next
4322 All ``command`` blocks are queued for execution until the next
4320 ``batchsubmit`` block.
4323 ``batchsubmit`` block.
4321
4324
4322 batchsubmit
4325 batchsubmit
4323 -----------
4326 -----------
4324
4327
4325 Submit previously queued ``command`` blocks as a batch request.
4328 Submit previously queued ``command`` blocks as a batch request.
4326
4329
4327 This action MUST be paired with a ``batchbegin`` action.
4330 This action MUST be paired with a ``batchbegin`` action.
4328
4331
4329 httprequest <method> <path>
4332 httprequest <method> <path>
4330 ---------------------------
4333 ---------------------------
4331
4334
4332 (HTTP peer only)
4335 (HTTP peer only)
4333
4336
4334 Send an HTTP request to the peer.
4337 Send an HTTP request to the peer.
4335
4338
4336 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4339 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4337
4340
4338 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4341 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4339 headers to add to the request. e.g. ``Accept: foo``.
4342 headers to add to the request. e.g. ``Accept: foo``.
4340
4343
4341 The following arguments are special:
4344 The following arguments are special:
4342
4345
4343 ``BODYFILE``
4346 ``BODYFILE``
4344 The content of the file defined as the value to this argument will be
4347 The content of the file defined as the value to this argument will be
4345 transferred verbatim as the HTTP request body.
4348 transferred verbatim as the HTTP request body.
4346
4349
4347 ``frame <type> <flags> <payload>``
4350 ``frame <type> <flags> <payload>``
4348 Send a unified protocol frame as part of the request body.
4351 Send a unified protocol frame as part of the request body.
4349
4352
4350 All frames will be collected and sent as the body to the HTTP
4353 All frames will be collected and sent as the body to the HTTP
4351 request.
4354 request.
4352
4355
4353 close
4356 close
4354 -----
4357 -----
4355
4358
4356 Close the connection to the server.
4359 Close the connection to the server.
4357
4360
4358 flush
4361 flush
4359 -----
4362 -----
4360
4363
4361 Flush data written to the server.
4364 Flush data written to the server.
4362
4365
4363 readavailable
4366 readavailable
4364 -------------
4367 -------------
4365
4368
4366 Close the write end of the connection and read all available data from
4369 Close the write end of the connection and read all available data from
4367 the server.
4370 the server.
4368
4371
4369 If the connection to the server encompasses multiple pipes, we poll both
4372 If the connection to the server encompasses multiple pipes, we poll both
4370 pipes and read available data.
4373 pipes and read available data.
4371
4374
4372 readline
4375 readline
4373 --------
4376 --------
4374
4377
4375 Read a line of output from the server. If there are multiple output
4378 Read a line of output from the server. If there are multiple output
4376 pipes, reads only the main pipe.
4379 pipes, reads only the main pipe.
4377
4380
4378 ereadline
4381 ereadline
4379 ---------
4382 ---------
4380
4383
4381 Like ``readline``, but read from the stderr pipe, if available.
4384 Like ``readline``, but read from the stderr pipe, if available.
4382
4385
4383 read <X>
4386 read <X>
4384 --------
4387 --------
4385
4388
4386 ``read()`` N bytes from the server's main output pipe.
4389 ``read()`` N bytes from the server's main output pipe.
4387
4390
4388 eread <X>
4391 eread <X>
4389 ---------
4392 ---------
4390
4393
4391 ``read()`` N bytes from the server's stderr pipe, if available.
4394 ``read()`` N bytes from the server's stderr pipe, if available.
4392
4395
4393 Specifying Unified Frame-Based Protocol Frames
4396 Specifying Unified Frame-Based Protocol Frames
4394 ----------------------------------------------
4397 ----------------------------------------------
4395
4398
4396 It is possible to emit a *Unified Frame-Based Protocol* by using special
4399 It is possible to emit a *Unified Frame-Based Protocol* by using special
4397 syntax.
4400 syntax.
4398
4401
4399 A frame is composed as a type, flags, and payload. These can be parsed
4402 A frame is composed as a type, flags, and payload. These can be parsed
4400 from a string of the form:
4403 from a string of the form:
4401
4404
4402 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4405 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4403
4406
4404 ``request-id`` and ``stream-id`` are integers defining the request and
4407 ``request-id`` and ``stream-id`` are integers defining the request and
4405 stream identifiers.
4408 stream identifiers.
4406
4409
4407 ``type`` can be an integer value for the frame type or the string name
4410 ``type`` can be an integer value for the frame type or the string name
4408 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4411 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4409 ``command-name``.
4412 ``command-name``.
4410
4413
4411 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4414 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4412 components. Each component (and there can be just one) can be an integer
4415 components. Each component (and there can be just one) can be an integer
4413 or a flag name for stream flags or frame flags, respectively. Values are
4416 or a flag name for stream flags or frame flags, respectively. Values are
4414 resolved to integers and then bitwise OR'd together.
4417 resolved to integers and then bitwise OR'd together.
4415
4418
4416 ``payload`` represents the raw frame payload. If it begins with
4419 ``payload`` represents the raw frame payload. If it begins with
4417 ``cbor:``, the following string is evaluated as Python code and the
4420 ``cbor:``, the following string is evaluated as Python code and the
4418 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4421 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4419 as a Python byte string literal.
4422 as a Python byte string literal.
4420 """
4423 """
4421 opts = pycompat.byteskwargs(opts)
4424 opts = pycompat.byteskwargs(opts)
4422
4425
4423 if opts[b'localssh'] and not repo:
4426 if opts[b'localssh'] and not repo:
4424 raise error.Abort(_(b'--localssh requires a repository'))
4427 raise error.Abort(_(b'--localssh requires a repository'))
4425
4428
4426 if opts[b'peer'] and opts[b'peer'] not in (
4429 if opts[b'peer'] and opts[b'peer'] not in (
4427 b'raw',
4430 b'raw',
4428 b'ssh1',
4431 b'ssh1',
4429 ):
4432 ):
4430 raise error.Abort(
4433 raise error.Abort(
4431 _(b'invalid value for --peer'),
4434 _(b'invalid value for --peer'),
4432 hint=_(b'valid values are "raw" and "ssh1"'),
4435 hint=_(b'valid values are "raw" and "ssh1"'),
4433 )
4436 )
4434
4437
4435 if path and opts[b'localssh']:
4438 if path and opts[b'localssh']:
4436 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4439 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4437
4440
4438 if ui.interactive():
4441 if ui.interactive():
4439 ui.write(_(b'(waiting for commands on stdin)\n'))
4442 ui.write(_(b'(waiting for commands on stdin)\n'))
4440
4443
4441 blocks = list(_parsewirelangblocks(ui.fin))
4444 blocks = list(_parsewirelangblocks(ui.fin))
4442
4445
4443 proc = None
4446 proc = None
4444 stdin = None
4447 stdin = None
4445 stdout = None
4448 stdout = None
4446 stderr = None
4449 stderr = None
4447 opener = None
4450 opener = None
4448
4451
4449 if opts[b'localssh']:
4452 if opts[b'localssh']:
4450 # We start the SSH server in its own process so there is process
4453 # We start the SSH server in its own process so there is process
4451 # separation. This prevents a whole class of potential bugs around
4454 # separation. This prevents a whole class of potential bugs around
4452 # shared state from interfering with server operation.
4455 # shared state from interfering with server operation.
4453 args = procutil.hgcmd() + [
4456 args = procutil.hgcmd() + [
4454 b'-R',
4457 b'-R',
4455 repo.root,
4458 repo.root,
4456 b'debugserve',
4459 b'debugserve',
4457 b'--sshstdio',
4460 b'--sshstdio',
4458 ]
4461 ]
4459 proc = subprocess.Popen(
4462 proc = subprocess.Popen(
4460 pycompat.rapply(procutil.tonativestr, args),
4463 pycompat.rapply(procutil.tonativestr, args),
4461 stdin=subprocess.PIPE,
4464 stdin=subprocess.PIPE,
4462 stdout=subprocess.PIPE,
4465 stdout=subprocess.PIPE,
4463 stderr=subprocess.PIPE,
4466 stderr=subprocess.PIPE,
4464 bufsize=0,
4467 bufsize=0,
4465 )
4468 )
4466
4469
4467 stdin = proc.stdin
4470 stdin = proc.stdin
4468 stdout = proc.stdout
4471 stdout = proc.stdout
4469 stderr = proc.stderr
4472 stderr = proc.stderr
4470
4473
4471 # We turn the pipes into observers so we can log I/O.
4474 # We turn the pipes into observers so we can log I/O.
4472 if ui.verbose or opts[b'peer'] == b'raw':
4475 if ui.verbose or opts[b'peer'] == b'raw':
4473 stdin = util.makeloggingfileobject(
4476 stdin = util.makeloggingfileobject(
4474 ui, proc.stdin, b'i', logdata=True
4477 ui, proc.stdin, b'i', logdata=True
4475 )
4478 )
4476 stdout = util.makeloggingfileobject(
4479 stdout = util.makeloggingfileobject(
4477 ui, proc.stdout, b'o', logdata=True
4480 ui, proc.stdout, b'o', logdata=True
4478 )
4481 )
4479 stderr = util.makeloggingfileobject(
4482 stderr = util.makeloggingfileobject(
4480 ui, proc.stderr, b'e', logdata=True
4483 ui, proc.stderr, b'e', logdata=True
4481 )
4484 )
4482
4485
4483 # --localssh also implies the peer connection settings.
4486 # --localssh also implies the peer connection settings.
4484
4487
4485 url = b'ssh://localserver'
4488 url = b'ssh://localserver'
4486 autoreadstderr = not opts[b'noreadstderr']
4489 autoreadstderr = not opts[b'noreadstderr']
4487
4490
4488 if opts[b'peer'] == b'ssh1':
4491 if opts[b'peer'] == b'ssh1':
4489 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4492 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4490 peer = sshpeer.sshv1peer(
4493 peer = sshpeer.sshv1peer(
4491 ui,
4494 ui,
4492 url,
4495 url,
4493 proc,
4496 proc,
4494 stdin,
4497 stdin,
4495 stdout,
4498 stdout,
4496 stderr,
4499 stderr,
4497 None,
4500 None,
4498 autoreadstderr=autoreadstderr,
4501 autoreadstderr=autoreadstderr,
4499 )
4502 )
4500 elif opts[b'peer'] == b'raw':
4503 elif opts[b'peer'] == b'raw':
4501 ui.write(_(b'using raw connection to peer\n'))
4504 ui.write(_(b'using raw connection to peer\n'))
4502 peer = None
4505 peer = None
4503 else:
4506 else:
4504 ui.write(_(b'creating ssh peer from handshake results\n'))
4507 ui.write(_(b'creating ssh peer from handshake results\n'))
4505 peer = sshpeer.makepeer(
4508 peer = sshpeer.makepeer(
4506 ui,
4509 ui,
4507 url,
4510 url,
4508 proc,
4511 proc,
4509 stdin,
4512 stdin,
4510 stdout,
4513 stdout,
4511 stderr,
4514 stderr,
4512 autoreadstderr=autoreadstderr,
4515 autoreadstderr=autoreadstderr,
4513 )
4516 )
4514
4517
4515 elif path:
4518 elif path:
4516 # We bypass hg.peer() so we can proxy the sockets.
4519 # We bypass hg.peer() so we can proxy the sockets.
4517 # TODO consider not doing this because we skip
4520 # TODO consider not doing this because we skip
4518 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4521 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4519 u = urlutil.url(path)
4522 u = urlutil.url(path)
4520 if u.scheme != b'http':
4523 if u.scheme != b'http':
4521 raise error.Abort(_(b'only http:// paths are currently supported'))
4524 raise error.Abort(_(b'only http:// paths are currently supported'))
4522
4525
4523 url, authinfo = u.authinfo()
4526 url, authinfo = u.authinfo()
4524 openerargs = {
4527 openerargs = {
4525 'useragent': b'Mercurial debugwireproto',
4528 'useragent': b'Mercurial debugwireproto',
4526 }
4529 }
4527
4530
4528 # Turn pipes/sockets into observers so we can log I/O.
4531 # Turn pipes/sockets into observers so we can log I/O.
4529 if ui.verbose:
4532 if ui.verbose:
4530 openerargs.update(
4533 openerargs.update(
4531 {
4534 {
4532 'loggingfh': ui,
4535 'loggingfh': ui,
4533 'loggingname': b's',
4536 'loggingname': b's',
4534 'loggingopts': {
4537 'loggingopts': {
4535 'logdata': True,
4538 'logdata': True,
4536 'logdataapis': False,
4539 'logdataapis': False,
4537 },
4540 },
4538 }
4541 }
4539 )
4542 )
4540
4543
4541 if ui.debugflag:
4544 if ui.debugflag:
4542 openerargs['loggingopts']['logdataapis'] = True
4545 openerargs['loggingopts']['logdataapis'] = True
4543
4546
4544 # Don't send default headers when in raw mode. This allows us to
4547 # Don't send default headers when in raw mode. This allows us to
4545 # bypass most of the behavior of our URL handling code so we can
4548 # bypass most of the behavior of our URL handling code so we can
4546 # have near complete control over what's sent on the wire.
4549 # have near complete control over what's sent on the wire.
4547 if opts[b'peer'] == b'raw':
4550 if opts[b'peer'] == b'raw':
4548 openerargs['sendaccept'] = False
4551 openerargs['sendaccept'] = False
4549
4552
4550 opener = urlmod.opener(ui, authinfo, **openerargs)
4553 opener = urlmod.opener(ui, authinfo, **openerargs)
4551
4554
4552 if opts[b'peer'] == b'raw':
4555 if opts[b'peer'] == b'raw':
4553 ui.write(_(b'using raw connection to peer\n'))
4556 ui.write(_(b'using raw connection to peer\n'))
4554 peer = None
4557 peer = None
4555 elif opts[b'peer']:
4558 elif opts[b'peer']:
4556 raise error.Abort(
4559 raise error.Abort(
4557 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4560 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4558 )
4561 )
4559 else:
4562 else:
4560 peer_path = urlutil.try_path(ui, path)
4563 peer_path = urlutil.try_path(ui, path)
4561 peer = httppeer.makepeer(ui, peer_path, opener=opener)
4564 peer = httppeer.makepeer(ui, peer_path, opener=opener)
4562
4565
4563 # We /could/ populate stdin/stdout with sock.makefile()...
4566 # We /could/ populate stdin/stdout with sock.makefile()...
4564 else:
4567 else:
4565 raise error.Abort(_(b'unsupported connection configuration'))
4568 raise error.Abort(_(b'unsupported connection configuration'))
4566
4569
4567 batchedcommands = None
4570 batchedcommands = None
4568
4571
4569 # Now perform actions based on the parsed wire language instructions.
4572 # Now perform actions based on the parsed wire language instructions.
4570 for action, lines in blocks:
4573 for action, lines in blocks:
4571 if action in (b'raw', b'raw+'):
4574 if action in (b'raw', b'raw+'):
4572 if not stdin:
4575 if not stdin:
4573 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4576 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4574
4577
4575 # Concatenate the data together.
4578 # Concatenate the data together.
4576 data = b''.join(l.lstrip() for l in lines)
4579 data = b''.join(l.lstrip() for l in lines)
4577 data = stringutil.unescapestr(data)
4580 data = stringutil.unescapestr(data)
4578 stdin.write(data)
4581 stdin.write(data)
4579
4582
4580 if action == b'raw+':
4583 if action == b'raw+':
4581 stdin.flush()
4584 stdin.flush()
4582 elif action == b'flush':
4585 elif action == b'flush':
4583 if not stdin:
4586 if not stdin:
4584 raise error.Abort(_(b'cannot call flush on this peer'))
4587 raise error.Abort(_(b'cannot call flush on this peer'))
4585 stdin.flush()
4588 stdin.flush()
4586 elif action.startswith(b'command'):
4589 elif action.startswith(b'command'):
4587 if not peer:
4590 if not peer:
4588 raise error.Abort(
4591 raise error.Abort(
4589 _(
4592 _(
4590 b'cannot send commands unless peer instance '
4593 b'cannot send commands unless peer instance '
4591 b'is available'
4594 b'is available'
4592 )
4595 )
4593 )
4596 )
4594
4597
4595 command = action.split(b' ', 1)[1]
4598 command = action.split(b' ', 1)[1]
4596
4599
4597 args = {}
4600 args = {}
4598 for line in lines:
4601 for line in lines:
4599 # We need to allow empty values.
4602 # We need to allow empty values.
4600 fields = line.lstrip().split(b' ', 1)
4603 fields = line.lstrip().split(b' ', 1)
4601 if len(fields) == 1:
4604 if len(fields) == 1:
4602 key = fields[0]
4605 key = fields[0]
4603 value = b''
4606 value = b''
4604 else:
4607 else:
4605 key, value = fields
4608 key, value = fields
4606
4609
4607 if value.startswith(b'eval:'):
4610 if value.startswith(b'eval:'):
4608 value = stringutil.evalpythonliteral(value[5:])
4611 value = stringutil.evalpythonliteral(value[5:])
4609 else:
4612 else:
4610 value = stringutil.unescapestr(value)
4613 value = stringutil.unescapestr(value)
4611
4614
4612 args[key] = value
4615 args[key] = value
4613
4616
4614 if batchedcommands is not None:
4617 if batchedcommands is not None:
4615 batchedcommands.append((command, args))
4618 batchedcommands.append((command, args))
4616 continue
4619 continue
4617
4620
4618 ui.status(_(b'sending %s command\n') % command)
4621 ui.status(_(b'sending %s command\n') % command)
4619
4622
4620 if b'PUSHFILE' in args:
4623 if b'PUSHFILE' in args:
4621 with open(args[b'PUSHFILE'], 'rb') as fh:
4624 with open(args[b'PUSHFILE'], 'rb') as fh:
4622 del args[b'PUSHFILE']
4625 del args[b'PUSHFILE']
4623 res, output = peer._callpush(
4626 res, output = peer._callpush(
4624 command, fh, **pycompat.strkwargs(args)
4627 command, fh, **pycompat.strkwargs(args)
4625 )
4628 )
4626 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4629 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4627 ui.status(
4630 ui.status(
4628 _(b'remote output: %s\n') % stringutil.escapestr(output)
4631 _(b'remote output: %s\n') % stringutil.escapestr(output)
4629 )
4632 )
4630 else:
4633 else:
4631 with peer.commandexecutor() as e:
4634 with peer.commandexecutor() as e:
4632 res = e.callcommand(command, args).result()
4635 res = e.callcommand(command, args).result()
4633
4636
4634 ui.status(
4637 ui.status(
4635 _(b'response: %s\n')
4638 _(b'response: %s\n')
4636 % stringutil.pprint(res, bprefix=True, indent=2)
4639 % stringutil.pprint(res, bprefix=True, indent=2)
4637 )
4640 )
4638
4641
4639 elif action == b'batchbegin':
4642 elif action == b'batchbegin':
4640 if batchedcommands is not None:
4643 if batchedcommands is not None:
4641 raise error.Abort(_(b'nested batchbegin not allowed'))
4644 raise error.Abort(_(b'nested batchbegin not allowed'))
4642
4645
4643 batchedcommands = []
4646 batchedcommands = []
4644 elif action == b'batchsubmit':
4647 elif action == b'batchsubmit':
4645 # There is a batching API we could go through. But it would be
4648 # There is a batching API we could go through. But it would be
4646 # difficult to normalize requests into function calls. It is easier
4649 # difficult to normalize requests into function calls. It is easier
4647 # to bypass this layer and normalize to commands + args.
4650 # to bypass this layer and normalize to commands + args.
4648 ui.status(
4651 ui.status(
4649 _(b'sending batch with %d sub-commands\n')
4652 _(b'sending batch with %d sub-commands\n')
4650 % len(batchedcommands)
4653 % len(batchedcommands)
4651 )
4654 )
4652 assert peer is not None
4655 assert peer is not None
4653 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4656 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4654 ui.status(
4657 ui.status(
4655 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4658 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4656 )
4659 )
4657
4660
4658 batchedcommands = None
4661 batchedcommands = None
4659
4662
4660 elif action.startswith(b'httprequest '):
4663 elif action.startswith(b'httprequest '):
4661 if not opener:
4664 if not opener:
4662 raise error.Abort(
4665 raise error.Abort(
4663 _(b'cannot use httprequest without an HTTP peer')
4666 _(b'cannot use httprequest without an HTTP peer')
4664 )
4667 )
4665
4668
4666 request = action.split(b' ', 2)
4669 request = action.split(b' ', 2)
4667 if len(request) != 3:
4670 if len(request) != 3:
4668 raise error.Abort(
4671 raise error.Abort(
4669 _(
4672 _(
4670 b'invalid httprequest: expected format is '
4673 b'invalid httprequest: expected format is '
4671 b'"httprequest <method> <path>'
4674 b'"httprequest <method> <path>'
4672 )
4675 )
4673 )
4676 )
4674
4677
4675 method, httppath = request[1:]
4678 method, httppath = request[1:]
4676 headers = {}
4679 headers = {}
4677 body = None
4680 body = None
4678 frames = []
4681 frames = []
4679 for line in lines:
4682 for line in lines:
4680 line = line.lstrip()
4683 line = line.lstrip()
4681 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4684 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4682 if m:
4685 if m:
4683 # Headers need to use native strings.
4686 # Headers need to use native strings.
4684 key = pycompat.strurl(m.group(1))
4687 key = pycompat.strurl(m.group(1))
4685 value = pycompat.strurl(m.group(2))
4688 value = pycompat.strurl(m.group(2))
4686 headers[key] = value
4689 headers[key] = value
4687 continue
4690 continue
4688
4691
4689 if line.startswith(b'BODYFILE '):
4692 if line.startswith(b'BODYFILE '):
4690 with open(line.split(b' ', 1), b'rb') as fh:
4693 with open(line.split(b' ', 1), b'rb') as fh:
4691 body = fh.read()
4694 body = fh.read()
4692 elif line.startswith(b'frame '):
4695 elif line.startswith(b'frame '):
4693 frame = wireprotoframing.makeframefromhumanstring(
4696 frame = wireprotoframing.makeframefromhumanstring(
4694 line[len(b'frame ') :]
4697 line[len(b'frame ') :]
4695 )
4698 )
4696
4699
4697 frames.append(frame)
4700 frames.append(frame)
4698 else:
4701 else:
4699 raise error.Abort(
4702 raise error.Abort(
4700 _(b'unknown argument to httprequest: %s') % line
4703 _(b'unknown argument to httprequest: %s') % line
4701 )
4704 )
4702
4705
4703 url = path + httppath
4706 url = path + httppath
4704
4707
4705 if frames:
4708 if frames:
4706 body = b''.join(bytes(f) for f in frames)
4709 body = b''.join(bytes(f) for f in frames)
4707
4710
4708 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4711 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4709
4712
4710 # urllib.Request insists on using has_data() as a proxy for
4713 # urllib.Request insists on using has_data() as a proxy for
4711 # determining the request method. Override that to use our
4714 # determining the request method. Override that to use our
4712 # explicitly requested method.
4715 # explicitly requested method.
4713 req.get_method = lambda: pycompat.sysstr(method)
4716 req.get_method = lambda: pycompat.sysstr(method)
4714
4717
4715 try:
4718 try:
4716 res = opener.open(req)
4719 res = opener.open(req)
4717 body = res.read()
4720 body = res.read()
4718 except util.urlerr.urlerror as e:
4721 except util.urlerr.urlerror as e:
4719 # read() method must be called, but only exists in Python 2
4722 # read() method must be called, but only exists in Python 2
4720 getattr(e, 'read', lambda: None)()
4723 getattr(e, 'read', lambda: None)()
4721 continue
4724 continue
4722
4725
4723 ct = res.headers.get('Content-Type')
4726 ct = res.headers.get('Content-Type')
4724 if ct == 'application/mercurial-cbor':
4727 if ct == 'application/mercurial-cbor':
4725 ui.write(
4728 ui.write(
4726 _(b'cbor> %s\n')
4729 _(b'cbor> %s\n')
4727 % stringutil.pprint(
4730 % stringutil.pprint(
4728 cborutil.decodeall(body), bprefix=True, indent=2
4731 cborutil.decodeall(body), bprefix=True, indent=2
4729 )
4732 )
4730 )
4733 )
4731
4734
4732 elif action == b'close':
4735 elif action == b'close':
4733 assert peer is not None
4736 assert peer is not None
4734 peer.close()
4737 peer.close()
4735 elif action == b'readavailable':
4738 elif action == b'readavailable':
4736 if not stdout or not stderr:
4739 if not stdout or not stderr:
4737 raise error.Abort(
4740 raise error.Abort(
4738 _(b'readavailable not available on this peer')
4741 _(b'readavailable not available on this peer')
4739 )
4742 )
4740
4743
4741 stdin.close()
4744 stdin.close()
4742 stdout.read()
4745 stdout.read()
4743 stderr.read()
4746 stderr.read()
4744
4747
4745 elif action == b'readline':
4748 elif action == b'readline':
4746 if not stdout:
4749 if not stdout:
4747 raise error.Abort(_(b'readline not available on this peer'))
4750 raise error.Abort(_(b'readline not available on this peer'))
4748 stdout.readline()
4751 stdout.readline()
4749 elif action == b'ereadline':
4752 elif action == b'ereadline':
4750 if not stderr:
4753 if not stderr:
4751 raise error.Abort(_(b'ereadline not available on this peer'))
4754 raise error.Abort(_(b'ereadline not available on this peer'))
4752 stderr.readline()
4755 stderr.readline()
4753 elif action.startswith(b'read '):
4756 elif action.startswith(b'read '):
4754 count = int(action.split(b' ', 1)[1])
4757 count = int(action.split(b' ', 1)[1])
4755 if not stdout:
4758 if not stdout:
4756 raise error.Abort(_(b'read not available on this peer'))
4759 raise error.Abort(_(b'read not available on this peer'))
4757 stdout.read(count)
4760 stdout.read(count)
4758 elif action.startswith(b'eread '):
4761 elif action.startswith(b'eread '):
4759 count = int(action.split(b' ', 1)[1])
4762 count = int(action.split(b' ', 1)[1])
4760 if not stderr:
4763 if not stderr:
4761 raise error.Abort(_(b'eread not available on this peer'))
4764 raise error.Abort(_(b'eread not available on this peer'))
4762 stderr.read(count)
4765 stderr.read(count)
4763 else:
4766 else:
4764 raise error.Abort(_(b'unknown action: %s') % action)
4767 raise error.Abort(_(b'unknown action: %s') % action)
4765
4768
4766 if batchedcommands is not None:
4769 if batchedcommands is not None:
4767 raise error.Abort(_(b'unclosed "batchbegin" request'))
4770 raise error.Abort(_(b'unclosed "batchbegin" request'))
4768
4771
4769 if peer:
4772 if peer:
4770 peer.close()
4773 peer.close()
4771
4774
4772 if proc:
4775 if proc:
4773 proc.kill()
4776 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now