dirstate-item: use the v1_serialization method in debugstate...
marmoute
r48368:85ce6ed5 default
@@ -1,4827 +1,4833 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import difflib
13 import difflib
14 import errno
14 import errno
15 import glob
15 import glob
16 import operator
16 import operator
17 import os
17 import os
18 import platform
18 import platform
19 import random
19 import random
20 import re
20 import re
21 import socket
21 import socket
22 import ssl
22 import ssl
23 import stat
23 import stat
24 import string
24 import string
25 import subprocess
25 import subprocess
26 import sys
26 import sys
27 import time
27 import time
28
28
29 from .i18n import _
29 from .i18n import _
30 from .node import (
30 from .node import (
31 bin,
31 bin,
32 hex,
32 hex,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from .pycompat import (
36 from .pycompat import (
37 getattr,
37 getattr,
38 open,
38 open,
39 )
39 )
40 from . import (
40 from . import (
41 bundle2,
41 bundle2,
42 bundlerepo,
42 bundlerepo,
43 changegroup,
43 changegroup,
44 cmdutil,
44 cmdutil,
45 color,
45 color,
46 context,
46 context,
47 copies,
47 copies,
48 dagparser,
48 dagparser,
49 encoding,
49 encoding,
50 error,
50 error,
51 exchange,
51 exchange,
52 extensions,
52 extensions,
53 filemerge,
53 filemerge,
54 filesetlang,
54 filesetlang,
55 formatter,
55 formatter,
56 hg,
56 hg,
57 httppeer,
57 httppeer,
58 localrepo,
58 localrepo,
59 lock as lockmod,
59 lock as lockmod,
60 logcmdutil,
60 logcmdutil,
61 mergestate as mergestatemod,
61 mergestate as mergestatemod,
62 metadata,
62 metadata,
63 obsolete,
63 obsolete,
64 obsutil,
64 obsutil,
65 pathutil,
65 pathutil,
66 phases,
66 phases,
67 policy,
67 policy,
68 pvec,
68 pvec,
69 pycompat,
69 pycompat,
70 registrar,
70 registrar,
71 repair,
71 repair,
72 repoview,
72 repoview,
73 revlog,
73 revlog,
74 revset,
74 revset,
75 revsetlang,
75 revsetlang,
76 scmutil,
76 scmutil,
77 setdiscovery,
77 setdiscovery,
78 simplemerge,
78 simplemerge,
79 sshpeer,
79 sshpeer,
80 sslutil,
80 sslutil,
81 streamclone,
81 streamclone,
82 strip,
82 strip,
83 tags as tagsmod,
83 tags as tagsmod,
84 templater,
84 templater,
85 treediscovery,
85 treediscovery,
86 upgrade,
86 upgrade,
87 url as urlmod,
87 url as urlmod,
88 util,
88 util,
89 vfs as vfsmod,
89 vfs as vfsmod,
90 wireprotoframing,
90 wireprotoframing,
91 wireprotoserver,
91 wireprotoserver,
92 wireprotov2peer,
92 wireprotov2peer,
93 )
93 )
94 from .interfaces import repository
94 from .interfaces import repository
95 from .utils import (
95 from .utils import (
96 cborutil,
96 cborutil,
97 compression,
97 compression,
98 dateutil,
98 dateutil,
99 procutil,
99 procutil,
100 stringutil,
100 stringutil,
101 urlutil,
101 urlutil,
102 )
102 )
103
103
104 from .revlogutils import (
104 from .revlogutils import (
105 deltas as deltautil,
105 deltas as deltautil,
106 nodemap,
106 nodemap,
107 sidedata,
107 sidedata,
108 )
108 )
109
109
110 release = lockmod.release
110 release = lockmod.release
111
111
112 table = {}
112 table = {}
113 table.update(strip.command._table)
113 table.update(strip.command._table)
114 command = registrar.command(table)
114 command = registrar.command(table)
115
115
116
116
117 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
117 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
118 def debugancestor(ui, repo, *args):
118 def debugancestor(ui, repo, *args):
119 """find the ancestor revision of two revisions in a given index"""
119 """find the ancestor revision of two revisions in a given index"""
120 if len(args) == 3:
120 if len(args) == 3:
121 index, rev1, rev2 = args
121 index, rev1, rev2 = args
122 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
122 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
123 lookup = r.lookup
123 lookup = r.lookup
124 elif len(args) == 2:
124 elif len(args) == 2:
125 if not repo:
125 if not repo:
126 raise error.Abort(
126 raise error.Abort(
127 _(b'there is no Mercurial repository here (.hg not found)')
127 _(b'there is no Mercurial repository here (.hg not found)')
128 )
128 )
129 rev1, rev2 = args
129 rev1, rev2 = args
130 r = repo.changelog
130 r = repo.changelog
131 lookup = repo.lookup
131 lookup = repo.lookup
132 else:
132 else:
133 raise error.Abort(_(b'either two or three arguments required'))
133 raise error.Abort(_(b'either two or three arguments required'))
134 a = r.ancestor(lookup(rev1), lookup(rev2))
134 a = r.ancestor(lookup(rev1), lookup(rev2))
135 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
135 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
136
136
137
137
138 @command(b'debugantivirusrunning', [])
138 @command(b'debugantivirusrunning', [])
139 def debugantivirusrunning(ui, repo):
139 def debugantivirusrunning(ui, repo):
140 """attempt to trigger an antivirus scanner to see if one is active"""
140 """attempt to trigger an antivirus scanner to see if one is active"""
141 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
141 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
142 f.write(
142 f.write(
143 util.b85decode(
143 util.b85decode(
144 # This is a base85-armored version of the EICAR test file. See
144 # This is a base85-armored version of the EICAR test file. See
145 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
145 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
146 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
146 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
147 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
147 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
148 )
148 )
149 )
149 )
150 # Give an AV engine time to scan the file.
150 # Give an AV engine time to scan the file.
151 time.sleep(2)
151 time.sleep(2)
152 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
152 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
153
153
154
154
155 @command(b'debugapplystreamclonebundle', [], b'FILE')
155 @command(b'debugapplystreamclonebundle', [], b'FILE')
156 def debugapplystreamclonebundle(ui, repo, fname):
156 def debugapplystreamclonebundle(ui, repo, fname):
157 """apply a stream clone bundle file"""
157 """apply a stream clone bundle file"""
158 f = hg.openpath(ui, fname)
158 f = hg.openpath(ui, fname)
159 gen = exchange.readbundle(ui, f, fname)
159 gen = exchange.readbundle(ui, f, fname)
160 gen.apply(repo)
160 gen.apply(repo)
161
161
162
162
163 @command(
163 @command(
164 b'debugbuilddag',
164 b'debugbuilddag',
165 [
165 [
166 (
166 (
167 b'm',
167 b'm',
168 b'mergeable-file',
168 b'mergeable-file',
169 None,
169 None,
170 _(b'add single file mergeable changes'),
170 _(b'add single file mergeable changes'),
171 ),
171 ),
172 (
172 (
173 b'o',
173 b'o',
174 b'overwritten-file',
174 b'overwritten-file',
175 None,
175 None,
176 _(b'add single file all revs overwrite'),
176 _(b'add single file all revs overwrite'),
177 ),
177 ),
178 (b'n', b'new-file', None, _(b'add new file at each rev')),
178 (b'n', b'new-file', None, _(b'add new file at each rev')),
179 ],
179 ],
180 _(b'[OPTION]... [TEXT]'),
180 _(b'[OPTION]... [TEXT]'),
181 )
181 )
182 def debugbuilddag(
182 def debugbuilddag(
183 ui,
183 ui,
184 repo,
184 repo,
185 text=None,
185 text=None,
186 mergeable_file=False,
186 mergeable_file=False,
187 overwritten_file=False,
187 overwritten_file=False,
188 new_file=False,
188 new_file=False,
189 ):
189 ):
190 """builds a repo with a given DAG from scratch in the current empty repo
190 """builds a repo with a given DAG from scratch in the current empty repo
191
191
192 The description of the DAG is read from stdin if not given on the
192 The description of the DAG is read from stdin if not given on the
193 command line.
193 command line.
194
194
195 Elements:
195 Elements:
196
196
197 - "+n" is a linear run of n nodes based on the current default parent
197 - "+n" is a linear run of n nodes based on the current default parent
198 - "." is a single node based on the current default parent
198 - "." is a single node based on the current default parent
199 - "$" resets the default parent to null (implied at the start);
199 - "$" resets the default parent to null (implied at the start);
200 otherwise the default parent is always the last node created
200 otherwise the default parent is always the last node created
201 - "<p" sets the default parent to the backref p
201 - "<p" sets the default parent to the backref p
202 - "*p" is a fork at parent p, which is a backref
202 - "*p" is a fork at parent p, which is a backref
203 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
203 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
204 - "/p2" is a merge of the preceding node and p2
204 - "/p2" is a merge of the preceding node and p2
205 - ":tag" defines a local tag for the preceding node
205 - ":tag" defines a local tag for the preceding node
206 - "@branch" sets the named branch for subsequent nodes
206 - "@branch" sets the named branch for subsequent nodes
207 - "#...\\n" is a comment up to the end of the line
207 - "#...\\n" is a comment up to the end of the line
208
208
209 Whitespace between the above elements is ignored.
209 Whitespace between the above elements is ignored.
210
210
211 A backref is either
211 A backref is either
212
212
213 - a number n, which references the node curr-n, where curr is the current
213 - a number n, which references the node curr-n, where curr is the current
214 node, or
214 node, or
215 - the name of a local tag you placed earlier using ":tag", or
215 - the name of a local tag you placed earlier using ":tag", or
216 - empty to denote the default parent.
216 - empty to denote the default parent.
217
217
218 All string-valued elements are either strictly alphanumeric, or must
218 All string-valued elements are either strictly alphanumeric, or must
219 be enclosed in double quotes ("..."), with "\\" as escape character.
219 be enclosed in double quotes ("..."), with "\\" as escape character.
220 """
220 """
221
221
222 if text is None:
222 if text is None:
223 ui.status(_(b"reading DAG from stdin\n"))
223 ui.status(_(b"reading DAG from stdin\n"))
224 text = ui.fin.read()
224 text = ui.fin.read()
225
225
226 cl = repo.changelog
226 cl = repo.changelog
227 if len(cl) > 0:
227 if len(cl) > 0:
228 raise error.Abort(_(b'repository is not empty'))
228 raise error.Abort(_(b'repository is not empty'))
229
229
230 # determine number of revs in DAG
230 # determine number of revs in DAG
231 total = 0
231 total = 0
232 for type, data in dagparser.parsedag(text):
232 for type, data in dagparser.parsedag(text):
233 if type == b'n':
233 if type == b'n':
234 total += 1
234 total += 1
235
235
236 if mergeable_file:
236 if mergeable_file:
237 linesperrev = 2
237 linesperrev = 2
238 # make a file with k lines per rev
238 # make a file with k lines per rev
239 initialmergedlines = [
239 initialmergedlines = [
240 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
240 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
241 ]
241 ]
242 initialmergedlines.append(b"")
242 initialmergedlines.append(b"")
243
243
244 tags = []
244 tags = []
245 progress = ui.makeprogress(
245 progress = ui.makeprogress(
246 _(b'building'), unit=_(b'revisions'), total=total
246 _(b'building'), unit=_(b'revisions'), total=total
247 )
247 )
248 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
248 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
249 at = -1
249 at = -1
250 atbranch = b'default'
250 atbranch = b'default'
251 nodeids = []
251 nodeids = []
252 id = 0
252 id = 0
253 progress.update(id)
253 progress.update(id)
254 for type, data in dagparser.parsedag(text):
254 for type, data in dagparser.parsedag(text):
255 if type == b'n':
255 if type == b'n':
256 ui.note((b'node %s\n' % pycompat.bytestr(data)))
256 ui.note((b'node %s\n' % pycompat.bytestr(data)))
257 id, ps = data
257 id, ps = data
258
258
259 files = []
259 files = []
260 filecontent = {}
260 filecontent = {}
261
261
262 p2 = None
262 p2 = None
263 if mergeable_file:
263 if mergeable_file:
264 fn = b"mf"
264 fn = b"mf"
265 p1 = repo[ps[0]]
265 p1 = repo[ps[0]]
266 if len(ps) > 1:
266 if len(ps) > 1:
267 p2 = repo[ps[1]]
267 p2 = repo[ps[1]]
268 pa = p1.ancestor(p2)
268 pa = p1.ancestor(p2)
269 base, local, other = [
269 base, local, other = [
270 x[fn].data() for x in (pa, p1, p2)
270 x[fn].data() for x in (pa, p1, p2)
271 ]
271 ]
272 m3 = simplemerge.Merge3Text(base, local, other)
272 m3 = simplemerge.Merge3Text(base, local, other)
273 ml = [l.strip() for l in m3.merge_lines()]
273 ml = [l.strip() for l in m3.merge_lines()]
274 ml.append(b"")
274 ml.append(b"")
275 elif at > 0:
275 elif at > 0:
276 ml = p1[fn].data().split(b"\n")
276 ml = p1[fn].data().split(b"\n")
277 else:
277 else:
278 ml = initialmergedlines
278 ml = initialmergedlines
279 ml[id * linesperrev] += b" r%i" % id
279 ml[id * linesperrev] += b" r%i" % id
280 mergedtext = b"\n".join(ml)
280 mergedtext = b"\n".join(ml)
281 files.append(fn)
281 files.append(fn)
282 filecontent[fn] = mergedtext
282 filecontent[fn] = mergedtext
283
283
284 if overwritten_file:
284 if overwritten_file:
285 fn = b"of"
285 fn = b"of"
286 files.append(fn)
286 files.append(fn)
287 filecontent[fn] = b"r%i\n" % id
287 filecontent[fn] = b"r%i\n" % id
288
288
289 if new_file:
289 if new_file:
290 fn = b"nf%i" % id
290 fn = b"nf%i" % id
291 files.append(fn)
291 files.append(fn)
292 filecontent[fn] = b"r%i\n" % id
292 filecontent[fn] = b"r%i\n" % id
293 if len(ps) > 1:
293 if len(ps) > 1:
294 if not p2:
294 if not p2:
295 p2 = repo[ps[1]]
295 p2 = repo[ps[1]]
296 for fn in p2:
296 for fn in p2:
297 if fn.startswith(b"nf"):
297 if fn.startswith(b"nf"):
298 files.append(fn)
298 files.append(fn)
299 filecontent[fn] = p2[fn].data()
299 filecontent[fn] = p2[fn].data()
300
300
301 def fctxfn(repo, cx, path):
301 def fctxfn(repo, cx, path):
302 if path in filecontent:
302 if path in filecontent:
303 return context.memfilectx(
303 return context.memfilectx(
304 repo, cx, path, filecontent[path]
304 repo, cx, path, filecontent[path]
305 )
305 )
306 return None
306 return None
307
307
308 if len(ps) == 0 or ps[0] < 0:
308 if len(ps) == 0 or ps[0] < 0:
309 pars = [None, None]
309 pars = [None, None]
310 elif len(ps) == 1:
310 elif len(ps) == 1:
311 pars = [nodeids[ps[0]], None]
311 pars = [nodeids[ps[0]], None]
312 else:
312 else:
313 pars = [nodeids[p] for p in ps]
313 pars = [nodeids[p] for p in ps]
314 cx = context.memctx(
314 cx = context.memctx(
315 repo,
315 repo,
316 pars,
316 pars,
317 b"r%i" % id,
317 b"r%i" % id,
318 files,
318 files,
319 fctxfn,
319 fctxfn,
320 date=(id, 0),
320 date=(id, 0),
321 user=b"debugbuilddag",
321 user=b"debugbuilddag",
322 extra={b'branch': atbranch},
322 extra={b'branch': atbranch},
323 )
323 )
324 nodeid = repo.commitctx(cx)
324 nodeid = repo.commitctx(cx)
325 nodeids.append(nodeid)
325 nodeids.append(nodeid)
326 at = id
326 at = id
327 elif type == b'l':
327 elif type == b'l':
328 id, name = data
328 id, name = data
329 ui.note((b'tag %s\n' % name))
329 ui.note((b'tag %s\n' % name))
330 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
330 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
331 elif type == b'a':
331 elif type == b'a':
332 ui.note((b'branch %s\n' % data))
332 ui.note((b'branch %s\n' % data))
333 atbranch = data
333 atbranch = data
334 progress.update(id)
334 progress.update(id)
335
335
336 if tags:
336 if tags:
337 repo.vfs.write(b"localtags", b"".join(tags))
337 repo.vfs.write(b"localtags", b"".join(tags))
338
338
339
339
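The debugbuilddag docstring above defines a small DAG description language. As a minimal sketch of how such a text is consumed, the loop below walks the same dagparser.parsedag() event stream that debugbuilddag itself iterates; the sample text and the assumed event payloads ((id, parents) for b'n', (id, name) for b'l', a branch name for b'a') are taken from the code above, not from any other API guarantee.

# Illustrative sketch only: count what a DAG text would build, using the
# same parsedag() events that debugbuilddag consumes above.
from mercurial import dagparser

text = b'+3 :top *2 +2 /top'  # 3 nodes, tag the last as "top", fork at backref 2, 2 more nodes, merge with "top"

nodes = 0
tags = []
branches = []
for type, data in dagparser.parsedag(text):
    if type == b'n':        # new node: data is (id, parent ids)
        nodes += 1
    elif type == b'l':      # local tag attached to the preceding node: data is (id, name)
        tags.append(data[1])
    elif type == b'a':      # switch to a named branch: data is the branch name
        branches.append(data)

print(nodes, tags, branches)  # for this sample text: 7 nodes, one tag b'top', no branch switches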
340 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
340 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
341 indent_string = b' ' * indent
341 indent_string = b' ' * indent
342 if all:
342 if all:
343 ui.writenoi18n(
343 ui.writenoi18n(
344 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
344 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
345 % indent_string
345 % indent_string
346 )
346 )
347
347
348 def showchunks(named):
348 def showchunks(named):
349 ui.write(b"\n%s%s\n" % (indent_string, named))
349 ui.write(b"\n%s%s\n" % (indent_string, named))
350 for deltadata in gen.deltaiter():
350 for deltadata in gen.deltaiter():
351 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
351 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
352 ui.write(
352 ui.write(
353 b"%s%s %s %s %s %s %d\n"
353 b"%s%s %s %s %s %s %d\n"
354 % (
354 % (
355 indent_string,
355 indent_string,
356 hex(node),
356 hex(node),
357 hex(p1),
357 hex(p1),
358 hex(p2),
358 hex(p2),
359 hex(cs),
359 hex(cs),
360 hex(deltabase),
360 hex(deltabase),
361 len(delta),
361 len(delta),
362 )
362 )
363 )
363 )
364
364
365 gen.changelogheader()
365 gen.changelogheader()
366 showchunks(b"changelog")
366 showchunks(b"changelog")
367 gen.manifestheader()
367 gen.manifestheader()
368 showchunks(b"manifest")
368 showchunks(b"manifest")
369 for chunkdata in iter(gen.filelogheader, {}):
369 for chunkdata in iter(gen.filelogheader, {}):
370 fname = chunkdata[b'filename']
370 fname = chunkdata[b'filename']
371 showchunks(fname)
371 showchunks(fname)
372 else:
372 else:
373 if isinstance(gen, bundle2.unbundle20):
373 if isinstance(gen, bundle2.unbundle20):
374 raise error.Abort(_(b'use debugbundle2 for this file'))
374 raise error.Abort(_(b'use debugbundle2 for this file'))
375 gen.changelogheader()
375 gen.changelogheader()
376 for deltadata in gen.deltaiter():
376 for deltadata in gen.deltaiter():
377 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
377 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
378 ui.write(b"%s%s\n" % (indent_string, hex(node)))
378 ui.write(b"%s%s\n" % (indent_string, hex(node)))
379
379
380
380
381 def _debugobsmarkers(ui, part, indent=0, **opts):
381 def _debugobsmarkers(ui, part, indent=0, **opts):
382 """display version and markers contained in 'data'"""
382 """display version and markers contained in 'data'"""
383 opts = pycompat.byteskwargs(opts)
383 opts = pycompat.byteskwargs(opts)
384 data = part.read()
384 data = part.read()
385 indent_string = b' ' * indent
385 indent_string = b' ' * indent
386 try:
386 try:
387 version, markers = obsolete._readmarkers(data)
387 version, markers = obsolete._readmarkers(data)
388 except error.UnknownVersion as exc:
388 except error.UnknownVersion as exc:
389 msg = b"%sunsupported version: %s (%d bytes)\n"
389 msg = b"%sunsupported version: %s (%d bytes)\n"
390 msg %= indent_string, exc.version, len(data)
390 msg %= indent_string, exc.version, len(data)
391 ui.write(msg)
391 ui.write(msg)
392 else:
392 else:
393 msg = b"%sversion: %d (%d bytes)\n"
393 msg = b"%sversion: %d (%d bytes)\n"
394 msg %= indent_string, version, len(data)
394 msg %= indent_string, version, len(data)
395 ui.write(msg)
395 ui.write(msg)
396 fm = ui.formatter(b'debugobsolete', opts)
396 fm = ui.formatter(b'debugobsolete', opts)
397 for rawmarker in sorted(markers):
397 for rawmarker in sorted(markers):
398 m = obsutil.marker(None, rawmarker)
398 m = obsutil.marker(None, rawmarker)
399 fm.startitem()
399 fm.startitem()
400 fm.plain(indent_string)
400 fm.plain(indent_string)
401 cmdutil.showmarker(fm, m)
401 cmdutil.showmarker(fm, m)
402 fm.end()
402 fm.end()
403
403
404
404
405 def _debugphaseheads(ui, data, indent=0):
405 def _debugphaseheads(ui, data, indent=0):
406 """display version and markers contained in 'data'"""
406 """display version and markers contained in 'data'"""
407 indent_string = b' ' * indent
407 indent_string = b' ' * indent
408 headsbyphase = phases.binarydecode(data)
408 headsbyphase = phases.binarydecode(data)
409 for phase in phases.allphases:
409 for phase in phases.allphases:
410 for head in headsbyphase[phase]:
410 for head in headsbyphase[phase]:
411 ui.write(indent_string)
411 ui.write(indent_string)
412 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
412 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
413
413
414
414
415 def _quasirepr(thing):
415 def _quasirepr(thing):
416 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
416 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
417 return b'{%s}' % (
417 return b'{%s}' % (
418 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
418 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
419 )
419 )
420 return pycompat.bytestr(repr(thing))
420 return pycompat.bytestr(repr(thing))
421
421
422
422
423 def _debugbundle2(ui, gen, all=None, **opts):
423 def _debugbundle2(ui, gen, all=None, **opts):
424 """lists the contents of a bundle2"""
424 """lists the contents of a bundle2"""
425 if not isinstance(gen, bundle2.unbundle20):
425 if not isinstance(gen, bundle2.unbundle20):
426 raise error.Abort(_(b'not a bundle2 file'))
426 raise error.Abort(_(b'not a bundle2 file'))
427 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
427 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
428 parttypes = opts.get('part_type', [])
428 parttypes = opts.get('part_type', [])
429 for part in gen.iterparts():
429 for part in gen.iterparts():
430 if parttypes and part.type not in parttypes:
430 if parttypes and part.type not in parttypes:
431 continue
431 continue
432 msg = b'%s -- %s (mandatory: %r)\n'
432 msg = b'%s -- %s (mandatory: %r)\n'
433 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
433 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
434 if part.type == b'changegroup':
434 if part.type == b'changegroup':
435 version = part.params.get(b'version', b'01')
435 version = part.params.get(b'version', b'01')
436 cg = changegroup.getunbundler(version, part, b'UN')
436 cg = changegroup.getunbundler(version, part, b'UN')
437 if not ui.quiet:
437 if not ui.quiet:
438 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
438 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
439 if part.type == b'obsmarkers':
439 if part.type == b'obsmarkers':
440 if not ui.quiet:
440 if not ui.quiet:
441 _debugobsmarkers(ui, part, indent=4, **opts)
441 _debugobsmarkers(ui, part, indent=4, **opts)
442 if part.type == b'phase-heads':
442 if part.type == b'phase-heads':
443 if not ui.quiet:
443 if not ui.quiet:
444 _debugphaseheads(ui, part, indent=4)
444 _debugphaseheads(ui, part, indent=4)
445
445
446
446
447 @command(
447 @command(
448 b'debugbundle',
448 b'debugbundle',
449 [
449 [
450 (b'a', b'all', None, _(b'show all details')),
450 (b'a', b'all', None, _(b'show all details')),
451 (b'', b'part-type', [], _(b'show only the named part type')),
451 (b'', b'part-type', [], _(b'show only the named part type')),
452 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
452 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
453 ],
453 ],
454 _(b'FILE'),
454 _(b'FILE'),
455 norepo=True,
455 norepo=True,
456 )
456 )
457 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
457 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
458 """lists the contents of a bundle"""
458 """lists the contents of a bundle"""
459 with hg.openpath(ui, bundlepath) as f:
459 with hg.openpath(ui, bundlepath) as f:
460 if spec:
460 if spec:
461 spec = exchange.getbundlespec(ui, f)
461 spec = exchange.getbundlespec(ui, f)
462 ui.write(b'%s\n' % spec)
462 ui.write(b'%s\n' % spec)
463 return
463 return
464
464
465 gen = exchange.readbundle(ui, f, bundlepath)
465 gen = exchange.readbundle(ui, f, bundlepath)
466 if isinstance(gen, bundle2.unbundle20):
466 if isinstance(gen, bundle2.unbundle20):
467 return _debugbundle2(ui, gen, all=all, **opts)
467 return _debugbundle2(ui, gen, all=all, **opts)
468 _debugchangegroup(ui, gen, all=all, **opts)
468 _debugchangegroup(ui, gen, all=all, **opts)
469
469
470
470
471 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
471 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
472 def debugcapabilities(ui, path, **opts):
472 def debugcapabilities(ui, path, **opts):
473 """lists the capabilities of a remote peer"""
473 """lists the capabilities of a remote peer"""
474 opts = pycompat.byteskwargs(opts)
474 opts = pycompat.byteskwargs(opts)
475 peer = hg.peer(ui, opts, path)
475 peer = hg.peer(ui, opts, path)
476 try:
476 try:
477 caps = peer.capabilities()
477 caps = peer.capabilities()
478 ui.writenoi18n(b'Main capabilities:\n')
478 ui.writenoi18n(b'Main capabilities:\n')
479 for c in sorted(caps):
479 for c in sorted(caps):
480 ui.write(b' %s\n' % c)
480 ui.write(b' %s\n' % c)
481 b2caps = bundle2.bundle2caps(peer)
481 b2caps = bundle2.bundle2caps(peer)
482 if b2caps:
482 if b2caps:
483 ui.writenoi18n(b'Bundle2 capabilities:\n')
483 ui.writenoi18n(b'Bundle2 capabilities:\n')
484 for key, values in sorted(pycompat.iteritems(b2caps)):
484 for key, values in sorted(pycompat.iteritems(b2caps)):
485 ui.write(b' %s\n' % key)
485 ui.write(b' %s\n' % key)
486 for v in values:
486 for v in values:
487 ui.write(b' %s\n' % v)
487 ui.write(b' %s\n' % v)
488 finally:
488 finally:
489 peer.close()
489 peer.close()
490
490
491
491
492 @command(
492 @command(
493 b'debugchangedfiles',
493 b'debugchangedfiles',
494 [
494 [
495 (
495 (
496 b'',
496 b'',
497 b'compute',
497 b'compute',
498 False,
498 False,
499 b"compute information instead of reading it from storage",
499 b"compute information instead of reading it from storage",
500 ),
500 ),
501 ],
501 ],
502 b'REV',
502 b'REV',
503 )
503 )
504 def debugchangedfiles(ui, repo, rev, **opts):
504 def debugchangedfiles(ui, repo, rev, **opts):
505 """list the stored file changes for a revision"""
505 """list the stored file changes for a revision"""
506 ctx = scmutil.revsingle(repo, rev, None)
506 ctx = scmutil.revsingle(repo, rev, None)
507 files = None
507 files = None
508
508
509 if opts['compute']:
509 if opts['compute']:
510 files = metadata.compute_all_files_changes(ctx)
510 files = metadata.compute_all_files_changes(ctx)
511 else:
511 else:
512 sd = repo.changelog.sidedata(ctx.rev())
512 sd = repo.changelog.sidedata(ctx.rev())
513 files_block = sd.get(sidedata.SD_FILES)
513 files_block = sd.get(sidedata.SD_FILES)
514 if files_block is not None:
514 if files_block is not None:
515 files = metadata.decode_files_sidedata(sd)
515 files = metadata.decode_files_sidedata(sd)
516 if files is not None:
516 if files is not None:
517 for f in sorted(files.touched):
517 for f in sorted(files.touched):
518 if f in files.added:
518 if f in files.added:
519 action = b"added"
519 action = b"added"
520 elif f in files.removed:
520 elif f in files.removed:
521 action = b"removed"
521 action = b"removed"
522 elif f in files.merged:
522 elif f in files.merged:
523 action = b"merged"
523 action = b"merged"
524 elif f in files.salvaged:
524 elif f in files.salvaged:
525 action = b"salvaged"
525 action = b"salvaged"
526 else:
526 else:
527 action = b"touched"
527 action = b"touched"
528
528
529 copy_parent = b""
529 copy_parent = b""
530 copy_source = b""
530 copy_source = b""
531 if f in files.copied_from_p1:
531 if f in files.copied_from_p1:
532 copy_parent = b"p1"
532 copy_parent = b"p1"
533 copy_source = files.copied_from_p1[f]
533 copy_source = files.copied_from_p1[f]
534 elif f in files.copied_from_p2:
534 elif f in files.copied_from_p2:
535 copy_parent = b"p2"
535 copy_parent = b"p2"
536 copy_source = files.copied_from_p2[f]
536 copy_source = files.copied_from_p2[f]
537
537
538 data = (action, copy_parent, f, copy_source)
538 data = (action, copy_parent, f, copy_source)
539 template = b"%-8s %2s: %s, %s;\n"
539 template = b"%-8s %2s: %s, %s;\n"
540 ui.write(template % data)
540 ui.write(template % data)
541
541
542
542
543 @command(b'debugcheckstate', [], b'')
543 @command(b'debugcheckstate', [], b'')
544 def debugcheckstate(ui, repo):
544 def debugcheckstate(ui, repo):
545 """validate the correctness of the current dirstate"""
545 """validate the correctness of the current dirstate"""
546 parent1, parent2 = repo.dirstate.parents()
546 parent1, parent2 = repo.dirstate.parents()
547 m1 = repo[parent1].manifest()
547 m1 = repo[parent1].manifest()
548 m2 = repo[parent2].manifest()
548 m2 = repo[parent2].manifest()
549 errors = 0
549 errors = 0
550 for f in repo.dirstate:
550 for f in repo.dirstate:
551 state = repo.dirstate[f]
551 state = repo.dirstate[f]
552 if state in b"nr" and f not in m1:
552 if state in b"nr" and f not in m1:
553 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
553 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
554 errors += 1
554 errors += 1
555 if state in b"a" and f in m1:
555 if state in b"a" and f in m1:
556 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
556 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
557 errors += 1
557 errors += 1
558 if state in b"m" and f not in m1 and f not in m2:
558 if state in b"m" and f not in m1 and f not in m2:
559 ui.warn(
559 ui.warn(
560 _(b"%s in state %s, but not in either manifest\n") % (f, state)
560 _(b"%s in state %s, but not in either manifest\n") % (f, state)
561 )
561 )
562 errors += 1
562 errors += 1
563 for f in m1:
563 for f in m1:
564 state = repo.dirstate[f]
564 state = repo.dirstate[f]
565 if state not in b"nrm":
565 if state not in b"nrm":
566 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
566 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
567 errors += 1
567 errors += 1
568 if errors:
568 if errors:
569 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
569 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
570 raise error.Abort(errstr)
570 raise error.Abort(errstr)
571
571
572
572
573 @command(
573 @command(
574 b'debugcolor',
574 b'debugcolor',
575 [(b'', b'style', None, _(b'show all configured styles'))],
575 [(b'', b'style', None, _(b'show all configured styles'))],
576 b'hg debugcolor',
576 b'hg debugcolor',
577 )
577 )
578 def debugcolor(ui, repo, **opts):
578 def debugcolor(ui, repo, **opts):
579 """show available color, effects or style"""
579 """show available color, effects or style"""
580 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
580 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
581 if opts.get('style'):
581 if opts.get('style'):
582 return _debugdisplaystyle(ui)
582 return _debugdisplaystyle(ui)
583 else:
583 else:
584 return _debugdisplaycolor(ui)
584 return _debugdisplaycolor(ui)
585
585
586
586
587 def _debugdisplaycolor(ui):
587 def _debugdisplaycolor(ui):
588 ui = ui.copy()
588 ui = ui.copy()
589 ui._styles.clear()
589 ui._styles.clear()
590 for effect in color._activeeffects(ui).keys():
590 for effect in color._activeeffects(ui).keys():
591 ui._styles[effect] = effect
591 ui._styles[effect] = effect
592 if ui._terminfoparams:
592 if ui._terminfoparams:
593 for k, v in ui.configitems(b'color'):
593 for k, v in ui.configitems(b'color'):
594 if k.startswith(b'color.'):
594 if k.startswith(b'color.'):
595 ui._styles[k] = k[6:]
595 ui._styles[k] = k[6:]
596 elif k.startswith(b'terminfo.'):
596 elif k.startswith(b'terminfo.'):
597 ui._styles[k] = k[9:]
597 ui._styles[k] = k[9:]
598 ui.write(_(b'available colors:\n'))
598 ui.write(_(b'available colors:\n'))
599 # sort label with a '_' after the other to group '_background' entry.
599 # sort label with a '_' after the other to group '_background' entry.
600 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
600 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
601 for colorname, label in items:
601 for colorname, label in items:
602 ui.write(b'%s\n' % colorname, label=label)
602 ui.write(b'%s\n' % colorname, label=label)
603
603
604
604
605 def _debugdisplaystyle(ui):
605 def _debugdisplaystyle(ui):
606 ui.write(_(b'available style:\n'))
606 ui.write(_(b'available style:\n'))
607 if not ui._styles:
607 if not ui._styles:
608 return
608 return
609 width = max(len(s) for s in ui._styles)
609 width = max(len(s) for s in ui._styles)
610 for label, effects in sorted(ui._styles.items()):
610 for label, effects in sorted(ui._styles.items()):
611 ui.write(b'%s' % label, label=label)
611 ui.write(b'%s' % label, label=label)
612 if effects:
612 if effects:
613 # 50
613 # 50
614 ui.write(b': ')
614 ui.write(b': ')
615 ui.write(b' ' * (max(0, width - len(label))))
615 ui.write(b' ' * (max(0, width - len(label))))
616 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
616 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
617 ui.write(b'\n')
617 ui.write(b'\n')
618
618
619
619
620 @command(b'debugcreatestreamclonebundle', [], b'FILE')
620 @command(b'debugcreatestreamclonebundle', [], b'FILE')
621 def debugcreatestreamclonebundle(ui, repo, fname):
621 def debugcreatestreamclonebundle(ui, repo, fname):
622 """create a stream clone bundle file
622 """create a stream clone bundle file
623
623
624 Stream bundles are special bundles that are essentially archives of
624 Stream bundles are special bundles that are essentially archives of
625 revlog files. They are commonly used for cloning very quickly.
625 revlog files. They are commonly used for cloning very quickly.
626 """
626 """
627 # TODO we may want to turn this into an abort when this functionality
627 # TODO we may want to turn this into an abort when this functionality
628 # is moved into `hg bundle`.
628 # is moved into `hg bundle`.
629 if phases.hassecret(repo):
629 if phases.hassecret(repo):
630 ui.warn(
630 ui.warn(
631 _(
631 _(
632 b'(warning: stream clone bundle will contain secret '
632 b'(warning: stream clone bundle will contain secret '
633 b'revisions)\n'
633 b'revisions)\n'
634 )
634 )
635 )
635 )
636
636
637 requirements, gen = streamclone.generatebundlev1(repo)
637 requirements, gen = streamclone.generatebundlev1(repo)
638 changegroup.writechunks(ui, gen, fname)
638 changegroup.writechunks(ui, gen, fname)
639
639
640 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
641
641
642
642
643 @command(
643 @command(
644 b'debugdag',
644 b'debugdag',
645 [
645 [
646 (b't', b'tags', None, _(b'use tags as labels')),
646 (b't', b'tags', None, _(b'use tags as labels')),
647 (b'b', b'branches', None, _(b'annotate with branch names')),
647 (b'b', b'branches', None, _(b'annotate with branch names')),
648 (b'', b'dots', None, _(b'use dots for runs')),
648 (b'', b'dots', None, _(b'use dots for runs')),
649 (b's', b'spaces', None, _(b'separate elements by spaces')),
649 (b's', b'spaces', None, _(b'separate elements by spaces')),
650 ],
650 ],
651 _(b'[OPTION]... [FILE [REV]...]'),
651 _(b'[OPTION]... [FILE [REV]...]'),
652 optionalrepo=True,
652 optionalrepo=True,
653 )
653 )
654 def debugdag(ui, repo, file_=None, *revs, **opts):
654 def debugdag(ui, repo, file_=None, *revs, **opts):
655 """format the changelog or an index DAG as a concise textual description
655 """format the changelog or an index DAG as a concise textual description
656
656
657 If you pass a revlog index, the revlog's DAG is emitted. If you list
657 If you pass a revlog index, the revlog's DAG is emitted. If you list
658 revision numbers, they get labeled in the output as rN.
658 revision numbers, they get labeled in the output as rN.
659
659
660 Otherwise, the changelog DAG of the current repo is emitted.
660 Otherwise, the changelog DAG of the current repo is emitted.
661 """
661 """
662 spaces = opts.get('spaces')
662 spaces = opts.get('spaces')
663 dots = opts.get('dots')
663 dots = opts.get('dots')
664 if file_:
664 if file_:
665 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
665 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
666 revs = {int(r) for r in revs}
666 revs = {int(r) for r in revs}
667
667
668 def events():
668 def events():
669 for r in rlog:
669 for r in rlog:
670 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
670 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
671 if r in revs:
671 if r in revs:
672 yield b'l', (r, b"r%i" % r)
672 yield b'l', (r, b"r%i" % r)
673
673
674 elif repo:
674 elif repo:
675 cl = repo.changelog
675 cl = repo.changelog
676 tags = opts.get('tags')
676 tags = opts.get('tags')
677 branches = opts.get('branches')
677 branches = opts.get('branches')
678 if tags:
678 if tags:
679 labels = {}
679 labels = {}
680 for l, n in repo.tags().items():
680 for l, n in repo.tags().items():
681 labels.setdefault(cl.rev(n), []).append(l)
681 labels.setdefault(cl.rev(n), []).append(l)
682
682
683 def events():
683 def events():
684 b = b"default"
684 b = b"default"
685 for r in cl:
685 for r in cl:
686 if branches:
686 if branches:
687 newb = cl.read(cl.node(r))[5][b'branch']
687 newb = cl.read(cl.node(r))[5][b'branch']
688 if newb != b:
688 if newb != b:
689 yield b'a', newb
689 yield b'a', newb
690 b = newb
690 b = newb
691 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
691 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
692 if tags:
692 if tags:
693 ls = labels.get(r)
693 ls = labels.get(r)
694 if ls:
694 if ls:
695 for l in ls:
695 for l in ls:
696 yield b'l', (r, l)
696 yield b'l', (r, l)
697
697
698 else:
698 else:
699 raise error.Abort(_(b'need repo for changelog dag'))
699 raise error.Abort(_(b'need repo for changelog dag'))
700
700
701 for line in dagparser.dagtextlines(
701 for line in dagparser.dagtextlines(
702 events(),
702 events(),
703 addspaces=spaces,
703 addspaces=spaces,
704 wraplabels=True,
704 wraplabels=True,
705 wrapannotations=True,
705 wrapannotations=True,
706 wrapnonlinear=dots,
706 wrapnonlinear=dots,
707 usedots=dots,
707 usedots=dots,
708 maxlinewidth=70,
708 maxlinewidth=70,
709 ):
709 ):
710 ui.write(line)
710 ui.write(line)
711 ui.write(b"\n")
711 ui.write(b"\n")
712
712
713
713
714 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
714 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
715 def debugdata(ui, repo, file_, rev=None, **opts):
715 def debugdata(ui, repo, file_, rev=None, **opts):
716 """dump the contents of a data file revision"""
716 """dump the contents of a data file revision"""
717 opts = pycompat.byteskwargs(opts)
717 opts = pycompat.byteskwargs(opts)
718 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
718 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
719 if rev is not None:
719 if rev is not None:
720 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
721 file_, rev = None, file_
721 file_, rev = None, file_
722 elif rev is None:
722 elif rev is None:
723 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
723 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
724 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
724 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
725 try:
725 try:
726 ui.write(r.rawdata(r.lookup(rev)))
726 ui.write(r.rawdata(r.lookup(rev)))
727 except KeyError:
727 except KeyError:
728 raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 raise error.Abort(_(b'invalid revision identifier %s') % rev)
729
729
730
730
731 @command(
731 @command(
732 b'debugdate',
732 b'debugdate',
733 [(b'e', b'extended', None, _(b'try extended date formats'))],
733 [(b'e', b'extended', None, _(b'try extended date formats'))],
734 _(b'[-e] DATE [RANGE]'),
734 _(b'[-e] DATE [RANGE]'),
735 norepo=True,
735 norepo=True,
736 optionalrepo=True,
736 optionalrepo=True,
737 )
737 )
738 def debugdate(ui, date, range=None, **opts):
738 def debugdate(ui, date, range=None, **opts):
739 """parse and display a date"""
739 """parse and display a date"""
740 if opts["extended"]:
740 if opts["extended"]:
741 d = dateutil.parsedate(date, dateutil.extendeddateformats)
741 d = dateutil.parsedate(date, dateutil.extendeddateformats)
742 else:
742 else:
743 d = dateutil.parsedate(date)
743 d = dateutil.parsedate(date)
744 ui.writenoi18n(b"internal: %d %d\n" % d)
744 ui.writenoi18n(b"internal: %d %d\n" % d)
745 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
745 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
746 if range:
746 if range:
747 m = dateutil.matchdate(range)
747 m = dateutil.matchdate(range)
748 ui.writenoi18n(b"match: %s\n" % m(d[0]))
748 ui.writenoi18n(b"match: %s\n" % m(d[0]))
749
749
750
750
751 @command(
751 @command(
752 b'debugdeltachain',
752 b'debugdeltachain',
753 cmdutil.debugrevlogopts + cmdutil.formatteropts,
753 cmdutil.debugrevlogopts + cmdutil.formatteropts,
754 _(b'-c|-m|FILE'),
754 _(b'-c|-m|FILE'),
755 optionalrepo=True,
755 optionalrepo=True,
756 )
756 )
757 def debugdeltachain(ui, repo, file_=None, **opts):
757 def debugdeltachain(ui, repo, file_=None, **opts):
758 """dump information about delta chains in a revlog
758 """dump information about delta chains in a revlog
759
759
760 Output can be templatized. Available template keywords are:
760 Output can be templatized. Available template keywords are:
761
761
762 :``rev``: revision number
762 :``rev``: revision number
763 :``chainid``: delta chain identifier (numbered by unique base)
763 :``chainid``: delta chain identifier (numbered by unique base)
764 :``chainlen``: delta chain length to this revision
764 :``chainlen``: delta chain length to this revision
765 :``prevrev``: previous revision in delta chain
765 :``prevrev``: previous revision in delta chain
766 :``deltatype``: role of delta / how it was computed
766 :``deltatype``: role of delta / how it was computed
767 :``compsize``: compressed size of revision
767 :``compsize``: compressed size of revision
768 :``uncompsize``: uncompressed size of revision
768 :``uncompsize``: uncompressed size of revision
769 :``chainsize``: total size of compressed revisions in chain
769 :``chainsize``: total size of compressed revisions in chain
770 :``chainratio``: total chain size divided by uncompressed revision size
770 :``chainratio``: total chain size divided by uncompressed revision size
771 (new delta chains typically start at ratio 2.00)
771 (new delta chains typically start at ratio 2.00)
772 :``lindist``: linear distance from base revision in delta chain to end
772 :``lindist``: linear distance from base revision in delta chain to end
773 of this revision
773 of this revision
774 :``extradist``: total size of revisions not part of this delta chain from
774 :``extradist``: total size of revisions not part of this delta chain from
775 base of delta chain to end of this revision; a measurement
775 base of delta chain to end of this revision; a measurement
776 of how much extra data we need to read/seek across to read
776 of how much extra data we need to read/seek across to read
777 the delta chain for this revision
777 the delta chain for this revision
778 :``extraratio``: extradist divided by chainsize; another representation of
778 :``extraratio``: extradist divided by chainsize; another representation of
779 how much unrelated data is needed to load this delta chain
779 how much unrelated data is needed to load this delta chain
780
780
781 If the repository is configured to use the sparse read, additional keywords
781 If the repository is configured to use the sparse read, additional keywords
782 are available:
782 are available:
783
783
784 :``readsize``: total size of data read from the disk for a revision
784 :``readsize``: total size of data read from the disk for a revision
785 (sum of the sizes of all the blocks)
785 (sum of the sizes of all the blocks)
786 :``largestblock``: size of the largest block of data read from the disk
786 :``largestblock``: size of the largest block of data read from the disk
787 :``readdensity``: density of useful bytes in the data read from the disk
787 :``readdensity``: density of useful bytes in the data read from the disk
788 :``srchunks``: in how many data hunks the whole revision would be read
788 :``srchunks``: in how many data hunks the whole revision would be read
789
789
790 The sparse read can be enabled with experimental.sparse-read = True
790 The sparse read can be enabled with experimental.sparse-read = True
791 """
791 """
792 opts = pycompat.byteskwargs(opts)
792 opts = pycompat.byteskwargs(opts)
793 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
793 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
794 index = r.index
794 index = r.index
795 start = r.start
795 start = r.start
796 length = r.length
796 length = r.length
797 generaldelta = r._generaldelta
797 generaldelta = r._generaldelta
798 withsparseread = getattr(r, '_withsparseread', False)
798 withsparseread = getattr(r, '_withsparseread', False)
799
799
800 def revinfo(rev):
800 def revinfo(rev):
801 e = index[rev]
801 e = index[rev]
802 compsize = e[1]
802 compsize = e[1]
803 uncompsize = e[2]
803 uncompsize = e[2]
804 chainsize = 0
804 chainsize = 0
805
805
806 if generaldelta:
806 if generaldelta:
807 if e[3] == e[5]:
807 if e[3] == e[5]:
808 deltatype = b'p1'
808 deltatype = b'p1'
809 elif e[3] == e[6]:
809 elif e[3] == e[6]:
810 deltatype = b'p2'
810 deltatype = b'p2'
811 elif e[3] == rev - 1:
811 elif e[3] == rev - 1:
812 deltatype = b'prev'
812 deltatype = b'prev'
813 elif e[3] == rev:
813 elif e[3] == rev:
814 deltatype = b'base'
814 deltatype = b'base'
815 else:
815 else:
816 deltatype = b'other'
816 deltatype = b'other'
817 else:
817 else:
818 if e[3] == rev:
818 if e[3] == rev:
819 deltatype = b'base'
819 deltatype = b'base'
820 else:
820 else:
821 deltatype = b'prev'
821 deltatype = b'prev'
822
822
823 chain = r._deltachain(rev)[0]
823 chain = r._deltachain(rev)[0]
824 for iterrev in chain:
824 for iterrev in chain:
825 e = index[iterrev]
825 e = index[iterrev]
826 chainsize += e[1]
826 chainsize += e[1]
827
827
828 return compsize, uncompsize, deltatype, chain, chainsize
828 return compsize, uncompsize, deltatype, chain, chainsize
829
829
830 fm = ui.formatter(b'debugdeltachain', opts)
830 fm = ui.formatter(b'debugdeltachain', opts)
831
831
832 fm.plain(
832 fm.plain(
833 b' rev chain# chainlen prev delta '
833 b' rev chain# chainlen prev delta '
834 b'size rawsize chainsize ratio lindist extradist '
834 b'size rawsize chainsize ratio lindist extradist '
835 b'extraratio'
835 b'extraratio'
836 )
836 )
837 if withsparseread:
837 if withsparseread:
838 fm.plain(b' readsize largestblk rddensity srchunks')
838 fm.plain(b' readsize largestblk rddensity srchunks')
839 fm.plain(b'\n')
839 fm.plain(b'\n')
840
840
841 chainbases = {}
841 chainbases = {}
842 for rev in r:
842 for rev in r:
843 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
843 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
844 chainbase = chain[0]
844 chainbase = chain[0]
845 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
845 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
846 basestart = start(chainbase)
846 basestart = start(chainbase)
847 revstart = start(rev)
847 revstart = start(rev)
848 lineardist = revstart + comp - basestart
848 lineardist = revstart + comp - basestart
849 extradist = lineardist - chainsize
849 extradist = lineardist - chainsize
850 try:
850 try:
851 prevrev = chain[-2]
851 prevrev = chain[-2]
852 except IndexError:
852 except IndexError:
853 prevrev = -1
853 prevrev = -1
854
854
855 if uncomp != 0:
855 if uncomp != 0:
856 chainratio = float(chainsize) / float(uncomp)
856 chainratio = float(chainsize) / float(uncomp)
857 else:
857 else:
858 chainratio = chainsize
858 chainratio = chainsize
859
859
860 if chainsize != 0:
860 if chainsize != 0:
861 extraratio = float(extradist) / float(chainsize)
861 extraratio = float(extradist) / float(chainsize)
862 else:
862 else:
863 extraratio = extradist
863 extraratio = extradist
864
864
865 fm.startitem()
865 fm.startitem()
866 fm.write(
866 fm.write(
867 b'rev chainid chainlen prevrev deltatype compsize '
867 b'rev chainid chainlen prevrev deltatype compsize '
868 b'uncompsize chainsize chainratio lindist extradist '
868 b'uncompsize chainsize chainratio lindist extradist '
869 b'extraratio',
869 b'extraratio',
870 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
870 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
871 rev,
871 rev,
872 chainid,
872 chainid,
873 len(chain),
873 len(chain),
874 prevrev,
874 prevrev,
875 deltatype,
875 deltatype,
876 comp,
876 comp,
877 uncomp,
877 uncomp,
878 chainsize,
878 chainsize,
879 chainratio,
879 chainratio,
880 lineardist,
880 lineardist,
881 extradist,
881 extradist,
882 extraratio,
882 extraratio,
883 rev=rev,
883 rev=rev,
884 chainid=chainid,
884 chainid=chainid,
885 chainlen=len(chain),
885 chainlen=len(chain),
886 prevrev=prevrev,
886 prevrev=prevrev,
887 deltatype=deltatype,
887 deltatype=deltatype,
888 compsize=comp,
888 compsize=comp,
889 uncompsize=uncomp,
889 uncompsize=uncomp,
890 chainsize=chainsize,
890 chainsize=chainsize,
891 chainratio=chainratio,
891 chainratio=chainratio,
892 lindist=lineardist,
892 lindist=lineardist,
893 extradist=extradist,
893 extradist=extradist,
894 extraratio=extraratio,
894 extraratio=extraratio,
895 )
895 )
896 if withsparseread:
896 if withsparseread:
897 readsize = 0
897 readsize = 0
898 largestblock = 0
898 largestblock = 0
899 srchunks = 0
899 srchunks = 0
900
900
901 for revschunk in deltautil.slicechunk(r, chain):
901 for revschunk in deltautil.slicechunk(r, chain):
902 srchunks += 1
902 srchunks += 1
903 blkend = start(revschunk[-1]) + length(revschunk[-1])
903 blkend = start(revschunk[-1]) + length(revschunk[-1])
904 blksize = blkend - start(revschunk[0])
904 blksize = blkend - start(revschunk[0])
905
905
906 readsize += blksize
906 readsize += blksize
907 if largestblock < blksize:
907 if largestblock < blksize:
908 largestblock = blksize
908 largestblock = blksize
909
909
910 if readsize:
910 if readsize:
911 readdensity = float(chainsize) / float(readsize)
911 readdensity = float(chainsize) / float(readsize)
912 else:
912 else:
913 readdensity = 1
913 readdensity = 1
914
914
915 fm.write(
915 fm.write(
916 b'readsize largestblock readdensity srchunks',
916 b'readsize largestblock readdensity srchunks',
917 b' %10d %10d %9.5f %8d',
917 b' %10d %10d %9.5f %8d',
918 readsize,
918 readsize,
919 largestblock,
919 largestblock,
920 readdensity,
920 readdensity,
921 srchunks,
921 srchunks,
922 readsize=readsize,
922 readsize=readsize,
923 largestblock=largestblock,
923 largestblock=largestblock,
924 readdensity=readdensity,
924 readdensity=readdensity,
925 srchunks=srchunks,
925 srchunks=srchunks,
926 )
926 )
927
927
928 fm.plain(b'\n')
928 fm.plain(b'\n')
929
929
930 fm.end()
930 fm.end()
931
931
932
932
933 @command(
933 @command(
934 b'debugdirstate|debugstate',
934 b'debugdirstate|debugstate',
935 [
935 [
936 (
936 (
937 b'',
937 b'',
938 b'nodates',
938 b'nodates',
939 None,
939 None,
940 _(b'do not display the saved mtime (DEPRECATED)'),
940 _(b'do not display the saved mtime (DEPRECATED)'),
941 ),
941 ),
942 (b'', b'dates', True, _(b'display the saved mtime')),
942 (b'', b'dates', True, _(b'display the saved mtime')),
943 (b'', b'datesort', None, _(b'sort by saved mtime')),
943 (b'', b'datesort', None, _(b'sort by saved mtime')),
944 (b'', b'dirs', False, _(b'display directories')),
944 (b'', b'dirs', False, _(b'display directories')),
945 ],
945 ],
946 _(b'[OPTION]...'),
946 _(b'[OPTION]...'),
947 )
947 )
948 def debugstate(ui, repo, **opts):
948 def debugstate(ui, repo, **opts):
949 """show the contents of the current dirstate"""
949 """show the contents of the current dirstate"""
950
950
951 nodates = not opts['dates']
951 nodates = not opts['dates']
952 if opts.get('nodates') is not None:
952 if opts.get('nodates') is not None:
953 nodates = True
953 nodates = True
954 datesort = opts.get('datesort')
954 datesort = opts.get('datesort')
955
955
956 if datesort:
956 if datesort:
957 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
957 keyfunc = lambda x: (
958 x[1].v1_mtime(),
959 x[0],
960 ) # sort by mtime, then by filename
958 else:
961 else:
959 keyfunc = None # sort by filename
962 keyfunc = None # sort by filename
960 entries = list(pycompat.iteritems(repo.dirstate))
963 entries = list(pycompat.iteritems(repo.dirstate))
961 if opts['dirs']:
964 if opts['dirs']:
962 entries.extend(repo.dirstate.directories())
965 entries.extend(repo.dirstate.directories())
963 entries.sort(key=keyfunc)
966 entries.sort(key=keyfunc)
964 for file_, ent in entries:
967 for file_, ent in entries:
965 if ent[3] == -1:
968 if ent.v1_mtime() == -1:
966 timestr = b'unset '
969 timestr = b'unset '
967 elif nodates:
970 elif nodates:
968 timestr = b'set '
971 timestr = b'set '
969 else:
972 else:
970 timestr = time.strftime(
973 timestr = time.strftime(
971 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
974 "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
972 )
975 )
973 timestr = encoding.strtolocal(timestr)
976 timestr = encoding.strtolocal(timestr)
974 if ent[1] & 0o20000:
977 if ent.mode & 0o20000:
975 mode = b'lnk'
978 mode = b'lnk'
976 else:
979 else:
977 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
980 mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
978 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
981 ui.write(
982 b"%c %s %10d %s%s\n"
983 % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
984 )
979 for f in repo.dirstate.copies():
985 for f in repo.dirstate.copies():
980 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
986 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
981
987
982
988
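The hunk above is the core of this changeset: debugstate stops indexing dirstate entries as tuples and reads the same fields through the entry's v1_* accessors. A minimal before/after sketch, using only the accessor names that appear in the new code (nothing else about the dirstate-item API is implied):

# Illustrative sketch of the access-style change in the hunk above.
def entry_fields_old(ent):
    # before: the entry was indexed like a (state, mode, size, mtime) tuple
    return ent[0], ent[1], ent[2], ent[3]

def entry_fields_new(ent):
    # after: the same four fields come from the v1_* accessors used above
    return ent.v1_state(), ent.v1_mode(), ent.v1_size(), ent.v1_mtime()

Either way the values keep the dirstate v1 semantics; for example, an mtime of -1 still renders as 'unset' in the output above.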
983 @command(
989 @command(
984 b'debugdiscovery',
990 b'debugdiscovery',
985 [
991 [
986 (b'', b'old', None, _(b'use old-style discovery')),
992 (b'', b'old', None, _(b'use old-style discovery')),
987 (
993 (
988 b'',
994 b'',
989 b'nonheads',
995 b'nonheads',
990 None,
996 None,
991 _(b'use old-style discovery with non-heads included'),
997 _(b'use old-style discovery with non-heads included'),
992 ),
998 ),
993 (b'', b'rev', [], b'restrict discovery to this set of revs'),
999 (b'', b'rev', [], b'restrict discovery to this set of revs'),
994 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1000 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
995 (
1001 (
996 b'',
1002 b'',
997 b'local-as-revs',
1003 b'local-as-revs',
998 b"",
1004 b"",
999 b'treat local as having these revisions only',
1005 b'treat local as having these revisions only',
1000 ),
1006 ),
1001 (
1007 (
1002 b'',
1008 b'',
1003 b'remote-as-revs',
1009 b'remote-as-revs',
1004 b"",
1010 b"",
1005 b'use local as remote, with only these revisions',
1011 b'use local as remote, with only these revisions',
1006 ),
1012 ),
1007 ]
1013 ]
1008 + cmdutil.remoteopts
1014 + cmdutil.remoteopts
1009 + cmdutil.formatteropts,
1015 + cmdutil.formatteropts,
1010 _(b'[--rev REV] [OTHER]'),
1016 _(b'[--rev REV] [OTHER]'),
1011 )
1017 )
1012 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1018 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1013 """runs the changeset discovery protocol in isolation
1019 """runs the changeset discovery protocol in isolation
1014
1020
1015 The local peer can be "replaced" by a subset of the local repository by
1021 The local peer can be "replaced" by a subset of the local repository by
1016 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1022 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1017 be "replaced" by a subset of the local repository using the
1023 be "replaced" by a subset of the local repository using the
1018 `--local-as-revs` flag. This is useful to efficiently debug pathological
1024 `--local-as-revs` flag. This is useful to efficiently debug pathological
1019 discovery situation.
1025 discovery situation.
1020
1026
1021 The following developer-oriented config options are relevant for people playing with this command:
1027 The following developer-oriented config options are relevant for people playing with this command:
1022
1028
1023 * devel.discovery.exchange-heads=True
1029 * devel.discovery.exchange-heads=True
1024
1030
1025 If False, the discovery will not start with
1031 If False, the discovery will not start with
1026 remote head fetching and local head querying.
1032 remote head fetching and local head querying.
1027
1033
1028 * devel.discovery.grow-sample=True
1034 * devel.discovery.grow-sample=True
1029
1035
1030 If False, the sample size used in set discovery will not be increased
1036 If False, the sample size used in set discovery will not be increased
1031 through the process
1037 through the process
1032
1038
1033 * devel.discovery.grow-sample.dynamic=True
1039 * devel.discovery.grow-sample.dynamic=True
1034
1040
1035 When discovery.grow-sample.dynamic is True, the default, the sample size is
1041 When discovery.grow-sample.dynamic is True, the default, the sample size is
1036 adapted to the shape of the undecided set (it is set to the max of:
1042 adapted to the shape of the undecided set (it is set to the max of:
1037 <target-size>, len(roots(undecided)), len(heads(undecided)))
1043 <target-size>, len(roots(undecided)), len(heads(undecided)))
1038
1044
1039 * devel.discovery.grow-sample.rate=1.05
1045 * devel.discovery.grow-sample.rate=1.05
1040
1046
1041 the rate at which the sample grows
1047 the rate at which the sample grows
1042
1048
1043 * devel.discovery.randomize=True
1049 * devel.discovery.randomize=True
1044
1050
1045 If False, random sampling during discovery is deterministic. It is meant for
1051 If False, random sampling during discovery is deterministic. It is meant for
1046 integration tests.
1052 integration tests.
1047
1053
1048 * devel.discovery.sample-size=200
1054 * devel.discovery.sample-size=200
1049
1055
1050 Control the size of the discovery sample
1056 Control the size of the discovery sample
1051
1057
1052 * devel.discovery.sample-size.initial=100
1058 * devel.discovery.sample-size.initial=100
1053
1059
1054 Control the size of the sample used for the initial round of discovery
1060 Control the size of the sample used for the initial round of discovery
1055 """
1061 """
1056 opts = pycompat.byteskwargs(opts)
1062 opts = pycompat.byteskwargs(opts)
1057 unfi = repo.unfiltered()
1063 unfi = repo.unfiltered()
1058
1064
1059 # setup potential extra filtering
1065 # setup potential extra filtering
1060 local_revs = opts[b"local_as_revs"]
1066 local_revs = opts[b"local_as_revs"]
1061 remote_revs = opts[b"remote_as_revs"]
1067 remote_revs = opts[b"remote_as_revs"]
1062
1068
1063 # make sure tests are repeatable
1069 # make sure tests are repeatable
1064 random.seed(int(opts[b'seed']))
1070 random.seed(int(opts[b'seed']))
1065
1071
1066 if not remote_revs:
1072 if not remote_revs:
1067
1073
1068 remoteurl, branches = urlutil.get_unique_pull_path(
1074 remoteurl, branches = urlutil.get_unique_pull_path(
1069 b'debugdiscovery', repo, ui, remoteurl
1075 b'debugdiscovery', repo, ui, remoteurl
1070 )
1076 )
1071 remote = hg.peer(repo, opts, remoteurl)
1077 remote = hg.peer(repo, opts, remoteurl)
1072 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1078 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1073 else:
1079 else:
1074 branches = (None, [])
1080 branches = (None, [])
1075 remote_filtered_revs = scmutil.revrange(
1081 remote_filtered_revs = scmutil.revrange(
1076 unfi, [b"not (::(%s))" % remote_revs]
1082 unfi, [b"not (::(%s))" % remote_revs]
1077 )
1083 )
1078 remote_filtered_revs = frozenset(remote_filtered_revs)
1084 remote_filtered_revs = frozenset(remote_filtered_revs)
1079
1085
1080 def remote_func(x):
1086 def remote_func(x):
1081 return remote_filtered_revs
1087 return remote_filtered_revs
1082
1088
1083 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1089 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1084
1090
1085 remote = repo.peer()
1091 remote = repo.peer()
1086 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1092 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1087
1093
1088 if local_revs:
1094 if local_revs:
1089 local_filtered_revs = scmutil.revrange(
1095 local_filtered_revs = scmutil.revrange(
1090 unfi, [b"not (::(%s))" % local_revs]
1096 unfi, [b"not (::(%s))" % local_revs]
1091 )
1097 )
1092 local_filtered_revs = frozenset(local_filtered_revs)
1098 local_filtered_revs = frozenset(local_filtered_revs)
1093
1099
1094 def local_func(x):
1100 def local_func(x):
1095 return local_filtered_revs
1101 return local_filtered_revs
1096
1102
1097 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1103 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1098 repo = repo.filtered(b'debug-discovery-local-filter')
1104 repo = repo.filtered(b'debug-discovery-local-filter')
1099
1105
1100 data = {}
1106 data = {}
1101 if opts.get(b'old'):
1107 if opts.get(b'old'):
1102
1108
1103 def doit(pushedrevs, remoteheads, remote=remote):
1109 def doit(pushedrevs, remoteheads, remote=remote):
1104 if not util.safehasattr(remote, b'branches'):
1110 if not util.safehasattr(remote, b'branches'):
1105 # enable in-client legacy support
1111 # enable in-client legacy support
1106 remote = localrepo.locallegacypeer(remote.local())
1112 remote = localrepo.locallegacypeer(remote.local())
1107 common, _in, hds = treediscovery.findcommonincoming(
1113 common, _in, hds = treediscovery.findcommonincoming(
1108 repo, remote, force=True, audit=data
1114 repo, remote, force=True, audit=data
1109 )
1115 )
1110 common = set(common)
1116 common = set(common)
1111 if not opts.get(b'nonheads'):
1117 if not opts.get(b'nonheads'):
1112 ui.writenoi18n(
1118 ui.writenoi18n(
1113 b"unpruned common: %s\n"
1119 b"unpruned common: %s\n"
1114 % b" ".join(sorted(short(n) for n in common))
1120 % b" ".join(sorted(short(n) for n in common))
1115 )
1121 )
1116
1122
1117 clnode = repo.changelog.node
1123 clnode = repo.changelog.node
1118 common = repo.revs(b'heads(::%ln)', common)
1124 common = repo.revs(b'heads(::%ln)', common)
1119 common = {clnode(r) for r in common}
1125 common = {clnode(r) for r in common}
1120 return common, hds
1126 return common, hds
1121
1127
1122 else:
1128 else:
1123
1129
1124 def doit(pushedrevs, remoteheads, remote=remote):
1130 def doit(pushedrevs, remoteheads, remote=remote):
1125 nodes = None
1131 nodes = None
1126 if pushedrevs:
1132 if pushedrevs:
1127 revs = scmutil.revrange(repo, pushedrevs)
1133 revs = scmutil.revrange(repo, pushedrevs)
1128 nodes = [repo[r].node() for r in revs]
1134 nodes = [repo[r].node() for r in revs]
1129 common, any, hds = setdiscovery.findcommonheads(
1135 common, any, hds = setdiscovery.findcommonheads(
1130 ui, repo, remote, ancestorsof=nodes, audit=data
1136 ui, repo, remote, ancestorsof=nodes, audit=data
1131 )
1137 )
1132 return common, hds
1138 return common, hds
1133
1139
1134 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1140 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1135 localrevs = opts[b'rev']
1141 localrevs = opts[b'rev']
1136
1142
1137 fm = ui.formatter(b'debugdiscovery', opts)
1143 fm = ui.formatter(b'debugdiscovery', opts)
1138 if fm.strict_format:
1144 if fm.strict_format:
1139
1145
1140 @contextlib.contextmanager
1146 @contextlib.contextmanager
1141 def may_capture_output():
1147 def may_capture_output():
1142 ui.pushbuffer()
1148 ui.pushbuffer()
1143 yield
1149 yield
1144 data[b'output'] = ui.popbuffer()
1150 data[b'output'] = ui.popbuffer()
1145
1151
1146 else:
1152 else:
1147 may_capture_output = util.nullcontextmanager
1153 may_capture_output = util.nullcontextmanager
1148 with may_capture_output():
1154 with may_capture_output():
1149 with util.timedcm('debug-discovery') as t:
1155 with util.timedcm('debug-discovery') as t:
1150 common, hds = doit(localrevs, remoterevs)
1156 common, hds = doit(localrevs, remoterevs)
1151
1157
1152 # compute all statistics
1158 # compute all statistics
1153 heads_common = set(common)
1159 heads_common = set(common)
1154 heads_remote = set(hds)
1160 heads_remote = set(hds)
1155 heads_local = set(repo.heads())
1161 heads_local = set(repo.heads())
1156 # note: there cannot be a local or remote head that is in common and not
1162 # note: there cannot be a local or remote head that is in common and not
1157 # itself a head of common.
1163 # itself a head of common.
1158 heads_common_local = heads_common & heads_local
1164 heads_common_local = heads_common & heads_local
1159 heads_common_remote = heads_common & heads_remote
1165 heads_common_remote = heads_common & heads_remote
1160 heads_common_both = heads_common & heads_remote & heads_local
1166 heads_common_both = heads_common & heads_remote & heads_local
1161
1167
1162 all = repo.revs(b'all()')
1168 all = repo.revs(b'all()')
1163 common = repo.revs(b'::%ln', common)
1169 common = repo.revs(b'::%ln', common)
1164 roots_common = repo.revs(b'roots(::%ld)', common)
1170 roots_common = repo.revs(b'roots(::%ld)', common)
1165 missing = repo.revs(b'not ::%ld', common)
1171 missing = repo.revs(b'not ::%ld', common)
1166 heads_missing = repo.revs(b'heads(%ld)', missing)
1172 heads_missing = repo.revs(b'heads(%ld)', missing)
1167 roots_missing = repo.revs(b'roots(%ld)', missing)
1173 roots_missing = repo.revs(b'roots(%ld)', missing)
1168 assert len(common) + len(missing) == len(all)
1174 assert len(common) + len(missing) == len(all)
1169
1175
1170 initial_undecided = repo.revs(
1176 initial_undecided = repo.revs(
1171 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1177 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1172 )
1178 )
1173 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1179 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1174 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1180 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1175 common_initial_undecided = initial_undecided & common
1181 common_initial_undecided = initial_undecided & common
1176 missing_initial_undecided = initial_undecided & missing
1182 missing_initial_undecided = initial_undecided & missing
1177
1183
1178 data[b'elapsed'] = t.elapsed
1184 data[b'elapsed'] = t.elapsed
1179 data[b'nb-common-heads'] = len(heads_common)
1185 data[b'nb-common-heads'] = len(heads_common)
1180 data[b'nb-common-heads-local'] = len(heads_common_local)
1186 data[b'nb-common-heads-local'] = len(heads_common_local)
1181 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1187 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1182 data[b'nb-common-heads-both'] = len(heads_common_both)
1188 data[b'nb-common-heads-both'] = len(heads_common_both)
1183 data[b'nb-common-roots'] = len(roots_common)
1189 data[b'nb-common-roots'] = len(roots_common)
1184 data[b'nb-head-local'] = len(heads_local)
1190 data[b'nb-head-local'] = len(heads_local)
1185 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1191 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1186 data[b'nb-head-remote'] = len(heads_remote)
1192 data[b'nb-head-remote'] = len(heads_remote)
1187 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1193 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1188 heads_common_remote
1194 heads_common_remote
1189 )
1195 )
1190 data[b'nb-revs'] = len(all)
1196 data[b'nb-revs'] = len(all)
1191 data[b'nb-revs-common'] = len(common)
1197 data[b'nb-revs-common'] = len(common)
1192 data[b'nb-revs-missing'] = len(missing)
1198 data[b'nb-revs-missing'] = len(missing)
1193 data[b'nb-missing-heads'] = len(heads_missing)
1199 data[b'nb-missing-heads'] = len(heads_missing)
1194 data[b'nb-missing-roots'] = len(roots_missing)
1200 data[b'nb-missing-roots'] = len(roots_missing)
1195 data[b'nb-ini_und'] = len(initial_undecided)
1201 data[b'nb-ini_und'] = len(initial_undecided)
1196 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1202 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1197 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1203 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1198 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1204 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1199 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1205 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1200
1206
1201 fm.startitem()
1207 fm.startitem()
1202 fm.data(**pycompat.strkwargs(data))
1208 fm.data(**pycompat.strkwargs(data))
1203 # display discovery summary
1209 # display discovery summary
1204 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1210 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1205 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1211 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1206 fm.plain(b"heads summary:\n")
1212 fm.plain(b"heads summary:\n")
1207 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1213 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1208 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1214 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1209 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1215 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1210 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1216 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1211 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1217 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1212 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1218 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1213 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1219 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1214 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1220 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1215 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1221 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1216 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1222 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1217 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1223 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1218 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1224 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1219 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1225 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1220 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1226 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1221 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1227 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1222 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1228 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1223 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1229 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1224 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1230 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1225 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1231 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1226 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1232 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1227 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1233 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1228 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1234 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1229
1235
1230 if ui.verbose:
1236 if ui.verbose:
1231 fm.plain(
1237 fm.plain(
1232 b"common heads: %s\n"
1238 b"common heads: %s\n"
1233 % b" ".join(sorted(short(n) for n in heads_common))
1239 % b" ".join(sorted(short(n) for n in heads_common))
1234 )
1240 )
1235 fm.end()
1241 fm.end()
1236
1242
1237
1243
1238 _chunksize = 4 << 10
1244 _chunksize = 4 << 10
1239
1245
1240
1246
1241 @command(
1247 @command(
1242 b'debugdownload',
1248 b'debugdownload',
1243 [
1249 [
1244 (b'o', b'output', b'', _(b'path')),
1250 (b'o', b'output', b'', _(b'path')),
1245 ],
1251 ],
1246 optionalrepo=True,
1252 optionalrepo=True,
1247 )
1253 )
1248 def debugdownload(ui, repo, url, output=None, **opts):
1254 def debugdownload(ui, repo, url, output=None, **opts):
1249 """download a resource using Mercurial logic and config"""
1255 """download a resource using Mercurial logic and config"""
1250 fh = urlmod.open(ui, url, output)
1256 fh = urlmod.open(ui, url, output)
1251
1257
1252 dest = ui
1258 dest = ui
1253 if output:
1259 if output:
1254 dest = open(output, b"wb", _chunksize)
1260 dest = open(output, b"wb", _chunksize)
1255 try:
1261 try:
1256 data = fh.read(_chunksize)
1262 data = fh.read(_chunksize)
1257 while data:
1263 while data:
1258 dest.write(data)
1264 dest.write(data)
1259 data = fh.read(_chunksize)
1265 data = fh.read(_chunksize)
1260 finally:
1266 finally:
1261 if output:
1267 if output:
1262 dest.close()
1268 dest.close()
1263
1269
1264
1270
1265 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1271 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1266 def debugextensions(ui, repo, **opts):
1272 def debugextensions(ui, repo, **opts):
1267 '''show information about active extensions'''
1273 '''show information about active extensions'''
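# Example (output shape only; the extension name and path are hypothetical):
#
#   $ hg debugextensions -v
#   rebase
#     location: /usr/lib/python3/dist-packages/hgext/rebase.py
#     bundled: yes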
1268 opts = pycompat.byteskwargs(opts)
1274 opts = pycompat.byteskwargs(opts)
1269 exts = extensions.extensions(ui)
1275 exts = extensions.extensions(ui)
1270 hgver = util.version()
1276 hgver = util.version()
1271 fm = ui.formatter(b'debugextensions', opts)
1277 fm = ui.formatter(b'debugextensions', opts)
1272 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1278 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1273 isinternal = extensions.ismoduleinternal(extmod)
1279 isinternal = extensions.ismoduleinternal(extmod)
1274 extsource = None
1280 extsource = None
1275
1281
1276 if util.safehasattr(extmod, '__file__'):
1282 if util.safehasattr(extmod, '__file__'):
1277 extsource = pycompat.fsencode(extmod.__file__)
1283 extsource = pycompat.fsencode(extmod.__file__)
1278 elif getattr(sys, 'oxidized', False):
1284 elif getattr(sys, 'oxidized', False):
1279 extsource = pycompat.sysexecutable
1285 extsource = pycompat.sysexecutable
1280 if isinternal:
1286 if isinternal:
1281 exttestedwith = [] # never expose magic string to users
1287 exttestedwith = [] # never expose magic string to users
1282 else:
1288 else:
1283 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1289 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1284 extbuglink = getattr(extmod, 'buglink', None)
1290 extbuglink = getattr(extmod, 'buglink', None)
1285
1291
1286 fm.startitem()
1292 fm.startitem()
1287
1293
1288 if ui.quiet or ui.verbose:
1294 if ui.quiet or ui.verbose:
1289 fm.write(b'name', b'%s\n', extname)
1295 fm.write(b'name', b'%s\n', extname)
1290 else:
1296 else:
1291 fm.write(b'name', b'%s', extname)
1297 fm.write(b'name', b'%s', extname)
1292 if isinternal or hgver in exttestedwith:
1298 if isinternal or hgver in exttestedwith:
1293 fm.plain(b'\n')
1299 fm.plain(b'\n')
1294 elif not exttestedwith:
1300 elif not exttestedwith:
1295 fm.plain(_(b' (untested!)\n'))
1301 fm.plain(_(b' (untested!)\n'))
1296 else:
1302 else:
1297 lasttestedversion = exttestedwith[-1]
1303 lasttestedversion = exttestedwith[-1]
1298 fm.plain(b' (%s!)\n' % lasttestedversion)
1304 fm.plain(b' (%s!)\n' % lasttestedversion)
1299
1305
1300 fm.condwrite(
1306 fm.condwrite(
1301 ui.verbose and extsource,
1307 ui.verbose and extsource,
1302 b'source',
1308 b'source',
1303 _(b' location: %s\n'),
1309 _(b' location: %s\n'),
1304 extsource or b"",
1310 extsource or b"",
1305 )
1311 )
1306
1312
1307 if ui.verbose:
1313 if ui.verbose:
1308 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1314 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1309 fm.data(bundled=isinternal)
1315 fm.data(bundled=isinternal)
1310
1316
1311 fm.condwrite(
1317 fm.condwrite(
1312 ui.verbose and exttestedwith,
1318 ui.verbose and exttestedwith,
1313 b'testedwith',
1319 b'testedwith',
1314 _(b' tested with: %s\n'),
1320 _(b' tested with: %s\n'),
1315 fm.formatlist(exttestedwith, name=b'ver'),
1321 fm.formatlist(exttestedwith, name=b'ver'),
1316 )
1322 )
1317
1323
1318 fm.condwrite(
1324 fm.condwrite(
1319 ui.verbose and extbuglink,
1325 ui.verbose and extbuglink,
1320 b'buglink',
1326 b'buglink',
1321 _(b' bug reporting: %s\n'),
1327 _(b' bug reporting: %s\n'),
1322 extbuglink or b"",
1328 extbuglink or b"",
1323 )
1329 )
1324
1330
1325 fm.end()
1331 fm.end()
1326
1332
1327
1333
1328 @command(
1334 @command(
1329 b'debugfileset',
1335 b'debugfileset',
1330 [
1336 [
1331 (
1337 (
1332 b'r',
1338 b'r',
1333 b'rev',
1339 b'rev',
1334 b'',
1340 b'',
1335 _(b'apply the filespec on this revision'),
1341 _(b'apply the filespec on this revision'),
1336 _(b'REV'),
1342 _(b'REV'),
1337 ),
1343 ),
1338 (
1344 (
1339 b'',
1345 b'',
1340 b'all-files',
1346 b'all-files',
1341 False,
1347 False,
1342 _(b'test files from all revisions and working directory'),
1348 _(b'test files from all revisions and working directory'),
1343 ),
1349 ),
1344 (
1350 (
1345 b's',
1351 b's',
1346 b'show-matcher',
1352 b'show-matcher',
1347 None,
1353 None,
1348 _(b'print internal representation of matcher'),
1354 _(b'print internal representation of matcher'),
1349 ),
1355 ),
1350 (
1356 (
1351 b'p',
1357 b'p',
1352 b'show-stage',
1358 b'show-stage',
1353 [],
1359 [],
1354 _(b'print parsed tree at the given stage'),
1360 _(b'print parsed tree at the given stage'),
1355 _(b'NAME'),
1361 _(b'NAME'),
1356 ),
1362 ),
1357 ],
1363 ],
1358 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1364 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1359 )
1365 )
1360 def debugfileset(ui, repo, expr, **opts):
1366 def debugfileset(ui, repo, expr, **opts):
1361 '''parse and apply a fileset specification'''
1367 '''parse and apply a fileset specification'''
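# Example (hypothetical fileset expression; the stage names come from the
# `stages` list below):
#
#   $ hg debugfileset --show-stage all 'added() and grep("TODO")'
#   * parsed:
#   ...
#   * analyzed:
#   ...
#   * optimized:
#   ...
#   <files matching the expression, one per line>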
1362 from . import fileset
1368 from . import fileset
1363
1369
1364 fileset.symbols # force import of fileset so we have predicates to optimize
1370 fileset.symbols # force import of fileset so we have predicates to optimize
1365 opts = pycompat.byteskwargs(opts)
1371 opts = pycompat.byteskwargs(opts)
1366 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1372 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1367
1373
1368 stages = [
1374 stages = [
1369 (b'parsed', pycompat.identity),
1375 (b'parsed', pycompat.identity),
1370 (b'analyzed', filesetlang.analyze),
1376 (b'analyzed', filesetlang.analyze),
1371 (b'optimized', filesetlang.optimize),
1377 (b'optimized', filesetlang.optimize),
1372 ]
1378 ]
1373 stagenames = {n for n, f in stages}
1379 stagenames = {n for n, f in stages}
1374
1380
1375 showalways = set()
1381 showalways = set()
1376 if ui.verbose and not opts[b'show_stage']:
1382 if ui.verbose and not opts[b'show_stage']:
1377 # show parsed tree by --verbose (deprecated)
1383 # show parsed tree by --verbose (deprecated)
1378 showalways.add(b'parsed')
1384 showalways.add(b'parsed')
1379 if opts[b'show_stage'] == [b'all']:
1385 if opts[b'show_stage'] == [b'all']:
1380 showalways.update(stagenames)
1386 showalways.update(stagenames)
1381 else:
1387 else:
1382 for n in opts[b'show_stage']:
1388 for n in opts[b'show_stage']:
1383 if n not in stagenames:
1389 if n not in stagenames:
1384 raise error.Abort(_(b'invalid stage name: %s') % n)
1390 raise error.Abort(_(b'invalid stage name: %s') % n)
1385 showalways.update(opts[b'show_stage'])
1391 showalways.update(opts[b'show_stage'])
1386
1392
1387 tree = filesetlang.parse(expr)
1393 tree = filesetlang.parse(expr)
1388 for n, f in stages:
1394 for n, f in stages:
1389 tree = f(tree)
1395 tree = f(tree)
1390 if n in showalways:
1396 if n in showalways:
1391 if opts[b'show_stage'] or n != b'parsed':
1397 if opts[b'show_stage'] or n != b'parsed':
1392 ui.write(b"* %s:\n" % n)
1398 ui.write(b"* %s:\n" % n)
1393 ui.write(filesetlang.prettyformat(tree), b"\n")
1399 ui.write(filesetlang.prettyformat(tree), b"\n")
1394
1400
1395 files = set()
1401 files = set()
1396 if opts[b'all_files']:
1402 if opts[b'all_files']:
1397 for r in repo:
1403 for r in repo:
1398 c = repo[r]
1404 c = repo[r]
1399 files.update(c.files())
1405 files.update(c.files())
1400 files.update(c.substate)
1406 files.update(c.substate)
1401 if opts[b'all_files'] or ctx.rev() is None:
1407 if opts[b'all_files'] or ctx.rev() is None:
1402 wctx = repo[None]
1408 wctx = repo[None]
1403 files.update(
1409 files.update(
1404 repo.dirstate.walk(
1410 repo.dirstate.walk(
1405 scmutil.matchall(repo),
1411 scmutil.matchall(repo),
1406 subrepos=list(wctx.substate),
1412 subrepos=list(wctx.substate),
1407 unknown=True,
1413 unknown=True,
1408 ignored=True,
1414 ignored=True,
1409 )
1415 )
1410 )
1416 )
1411 files.update(wctx.substate)
1417 files.update(wctx.substate)
1412 else:
1418 else:
1413 files.update(ctx.files())
1419 files.update(ctx.files())
1414 files.update(ctx.substate)
1420 files.update(ctx.substate)
1415
1421
1416 m = ctx.matchfileset(repo.getcwd(), expr)
1422 m = ctx.matchfileset(repo.getcwd(), expr)
1417 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1423 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1418 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1424 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1419 for f in sorted(files):
1425 for f in sorted(files):
1420 if not m(f):
1426 if not m(f):
1421 continue
1427 continue
1422 ui.write(b"%s\n" % f)
1428 ui.write(b"%s\n" % f)
1423
1429
1424
1430
1425 @command(b'debugformat', [] + cmdutil.formatteropts)
1431 @command(b'debugformat', [] + cmdutil.formatteropts)
1426 def debugformat(ui, repo, **opts):
1432 def debugformat(ui, repo, **opts):
1427 """display format information about the current repository
1433 """display format information about the current repository
1428
1434
1429 Use --verbose to get extra information about the current config value and
1435 Use --verbose to get extra information about the current config value and
1430 the Mercurial default."""
1436 the Mercurial default."""
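# Example (alignment approximate, variant values depend on the repository;
# the column headers come from the fm.plain() calls below):
#
#   $ hg debugformat -v
#   format-variant     repo config default
#   fncache:            yes    yes     yes
#   dotencode:          yes    yes     yes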
1431 opts = pycompat.byteskwargs(opts)
1437 opts = pycompat.byteskwargs(opts)
1432 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1438 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1433 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1439 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1434
1440
1435 def makeformatname(name):
1441 def makeformatname(name):
1436 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1442 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1437
1443
1438 fm = ui.formatter(b'debugformat', opts)
1444 fm = ui.formatter(b'debugformat', opts)
1439 if fm.isplain():
1445 if fm.isplain():
1440
1446
1441 def formatvalue(value):
1447 def formatvalue(value):
1442 if util.safehasattr(value, b'startswith'):
1448 if util.safehasattr(value, b'startswith'):
1443 return value
1449 return value
1444 if value:
1450 if value:
1445 return b'yes'
1451 return b'yes'
1446 else:
1452 else:
1447 return b'no'
1453 return b'no'
1448
1454
1449 else:
1455 else:
1450 formatvalue = pycompat.identity
1456 formatvalue = pycompat.identity
1451
1457
1452 fm.plain(b'format-variant')
1458 fm.plain(b'format-variant')
1453 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1459 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1454 fm.plain(b' repo')
1460 fm.plain(b' repo')
1455 if ui.verbose:
1461 if ui.verbose:
1456 fm.plain(b' config default')
1462 fm.plain(b' config default')
1457 fm.plain(b'\n')
1463 fm.plain(b'\n')
1458 for fv in upgrade.allformatvariant:
1464 for fv in upgrade.allformatvariant:
1459 fm.startitem()
1465 fm.startitem()
1460 repovalue = fv.fromrepo(repo)
1466 repovalue = fv.fromrepo(repo)
1461 configvalue = fv.fromconfig(repo)
1467 configvalue = fv.fromconfig(repo)
1462
1468
1463 if repovalue != configvalue:
1469 if repovalue != configvalue:
1464 namelabel = b'formatvariant.name.mismatchconfig'
1470 namelabel = b'formatvariant.name.mismatchconfig'
1465 repolabel = b'formatvariant.repo.mismatchconfig'
1471 repolabel = b'formatvariant.repo.mismatchconfig'
1466 elif repovalue != fv.default:
1472 elif repovalue != fv.default:
1467 namelabel = b'formatvariant.name.mismatchdefault'
1473 namelabel = b'formatvariant.name.mismatchdefault'
1468 repolabel = b'formatvariant.repo.mismatchdefault'
1474 repolabel = b'formatvariant.repo.mismatchdefault'
1469 else:
1475 else:
1470 namelabel = b'formatvariant.name.uptodate'
1476 namelabel = b'formatvariant.name.uptodate'
1471 repolabel = b'formatvariant.repo.uptodate'
1477 repolabel = b'formatvariant.repo.uptodate'
1472
1478
1473 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1479 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1474 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1480 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1475 if fv.default != configvalue:
1481 if fv.default != configvalue:
1476 configlabel = b'formatvariant.config.special'
1482 configlabel = b'formatvariant.config.special'
1477 else:
1483 else:
1478 configlabel = b'formatvariant.config.default'
1484 configlabel = b'formatvariant.config.default'
1479 fm.condwrite(
1485 fm.condwrite(
1480 ui.verbose,
1486 ui.verbose,
1481 b'config',
1487 b'config',
1482 b' %6s',
1488 b' %6s',
1483 formatvalue(configvalue),
1489 formatvalue(configvalue),
1484 label=configlabel,
1490 label=configlabel,
1485 )
1491 )
1486 fm.condwrite(
1492 fm.condwrite(
1487 ui.verbose,
1493 ui.verbose,
1488 b'default',
1494 b'default',
1489 b' %7s',
1495 b' %7s',
1490 formatvalue(fv.default),
1496 formatvalue(fv.default),
1491 label=b'formatvariant.default',
1497 label=b'formatvariant.default',
1492 )
1498 )
1493 fm.plain(b'\n')
1499 fm.plain(b'\n')
1494 fm.end()
1500 fm.end()
1495
1501
1496
1502
1497 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1503 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1498 def debugfsinfo(ui, path=b"."):
1504 def debugfsinfo(ui, path=b"."):
1499 """show information detected about current filesystem"""
1505 """show information detected about current filesystem"""
1500 ui.writenoi18n(b'path: %s\n' % path)
1506 ui.writenoi18n(b'path: %s\n' % path)
1501 ui.writenoi18n(
1507 ui.writenoi18n(
1502 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1508 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1503 )
1509 )
1504 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1510 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1505 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1511 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1506 ui.writenoi18n(
1512 ui.writenoi18n(
1507 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1513 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1508 )
1514 )
1509 ui.writenoi18n(
1515 ui.writenoi18n(
1510 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1516 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1511 )
1517 )
1512 casesensitive = b'(unknown)'
1518 casesensitive = b'(unknown)'
1513 try:
1519 try:
1514 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1520 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1515 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1521 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1516 except OSError:
1522 except OSError:
1517 pass
1523 pass
1518 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1524 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1519
1525
1520
1526
1521 @command(
1527 @command(
1522 b'debuggetbundle',
1528 b'debuggetbundle',
1523 [
1529 [
1524 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1530 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1525 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1531 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1526 (
1532 (
1527 b't',
1533 b't',
1528 b'type',
1534 b'type',
1529 b'bzip2',
1535 b'bzip2',
1530 _(b'bundle compression type to use'),
1536 _(b'bundle compression type to use'),
1531 _(b'TYPE'),
1537 _(b'TYPE'),
1532 ),
1538 ),
1533 ],
1539 ],
1534 _(b'REPO FILE [-H|-C ID]...'),
1540 _(b'REPO FILE [-H|-C ID]...'),
1535 norepo=True,
1541 norepo=True,
1536 )
1542 )
1537 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1543 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1538 """retrieves a bundle from a repo
1544 """retrieves a bundle from a repo
1539
1545
1540 Every ID must be a full-length hex node id string. Saves the bundle to the
1546 Every ID must be a full-length hex node id string. Saves the bundle to the
1541 given file.
1547 given file.
1542 """
1548 """
1543 opts = pycompat.byteskwargs(opts)
1549 opts = pycompat.byteskwargs(opts)
1544 repo = hg.peer(ui, opts, repopath)
1550 repo = hg.peer(ui, opts, repopath)
1545 if not repo.capable(b'getbundle'):
1551 if not repo.capable(b'getbundle'):
1546 raise error.Abort(b"getbundle() not supported by target repository")
1552 raise error.Abort(b"getbundle() not supported by target repository")
1547 args = {}
1553 args = {}
1548 if common:
1554 if common:
1549 args['common'] = [bin(s) for s in common]
1555 args['common'] = [bin(s) for s in common]
1550 if head:
1556 if head:
1551 args['heads'] = [bin(s) for s in head]
1557 args['heads'] = [bin(s) for s in head]
1552 # TODO: get desired bundlecaps from command line.
1558 # TODO: get desired bundlecaps from command line.
1553 args['bundlecaps'] = None
1559 args['bundlecaps'] = None
1554 bundle = repo.getbundle(b'debug', **args)
1560 bundle = repo.getbundle(b'debug', **args)
1555
1561
1556 bundletype = opts.get(b'type', b'bzip2').lower()
1562 bundletype = opts.get(b'type', b'bzip2').lower()
1557 btypes = {
1563 btypes = {
1558 b'none': b'HG10UN',
1564 b'none': b'HG10UN',
1559 b'bzip2': b'HG10BZ',
1565 b'bzip2': b'HG10BZ',
1560 b'gzip': b'HG10GZ',
1566 b'gzip': b'HG10GZ',
1561 b'bundle2': b'HG20',
1567 b'bundle2': b'HG20',
1562 }
1568 }
1563 bundletype = btypes.get(bundletype)
1569 bundletype = btypes.get(bundletype)
1564 if bundletype not in bundle2.bundletypes:
1570 if bundletype not in bundle2.bundletypes:
1565 raise error.Abort(_(b'unknown bundle type specified with --type'))
1571 raise error.Abort(_(b'unknown bundle type specified with --type'))
1566 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1572 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1567
1573
1568
1574
1569 @command(b'debugignore', [], b'[FILE]')
1575 @command(b'debugignore', [], b'[FILE]')
1570 def debugignore(ui, repo, *files, **opts):
1576 def debugignore(ui, repo, *files, **opts):
1571 """display the combined ignore pattern and information about ignored files
1577 """display the combined ignore pattern and information about ignored files
1572
1578
1573 With no argument display the combined ignore pattern.
1579 With no argument display the combined ignore pattern.
1574
1580
1575 Given space-separated file names, show whether each given file is ignored
1581 Given space-separated file names, show whether each given file is ignored
1576 and, if so, the ignore rule (file and line number) that matched it.
1582 and, if so, the ignore rule (file and line number) that matched it.
1577 """
1583 """
1578 ignore = repo.dirstate._ignore
1584 ignore = repo.dirstate._ignore
1579 if not files:
1585 if not files:
1580 # Show all the patterns
1586 # Show all the patterns
1581 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1587 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1582 else:
1588 else:
1583 m = scmutil.match(repo[None], pats=files)
1589 m = scmutil.match(repo[None], pats=files)
1584 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1590 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1585 for f in m.files():
1591 for f in m.files():
1586 nf = util.normpath(f)
1592 nf = util.normpath(f)
1587 ignored = None
1593 ignored = None
1588 ignoredata = None
1594 ignoredata = None
1589 if nf != b'.':
1595 if nf != b'.':
1590 if ignore(nf):
1596 if ignore(nf):
1591 ignored = nf
1597 ignored = nf
1592 ignoredata = repo.dirstate._ignorefileandline(nf)
1598 ignoredata = repo.dirstate._ignorefileandline(nf)
1593 else:
1599 else:
1594 for p in pathutil.finddirs(nf):
1600 for p in pathutil.finddirs(nf):
1595 if ignore(p):
1601 if ignore(p):
1596 ignored = p
1602 ignored = p
1597 ignoredata = repo.dirstate._ignorefileandline(p)
1603 ignoredata = repo.dirstate._ignorefileandline(p)
1598 break
1604 break
1599 if ignored:
1605 if ignored:
1600 if ignored == nf:
1606 if ignored == nf:
1601 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1607 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1602 else:
1608 else:
1603 ui.write(
1609 ui.write(
1604 _(
1610 _(
1605 b"%s is ignored because of "
1611 b"%s is ignored because of "
1606 b"containing directory %s\n"
1612 b"containing directory %s\n"
1607 )
1613 )
1608 % (uipathfn(f), ignored)
1614 % (uipathfn(f), ignored)
1609 )
1615 )
1610 ignorefile, lineno, line = ignoredata
1616 ignorefile, lineno, line = ignoredata
1611 ui.write(
1617 ui.write(
1612 _(b"(ignore rule in %s, line %d: '%s')\n")
1618 _(b"(ignore rule in %s, line %d: '%s')\n")
1613 % (ignorefile, lineno, line)
1619 % (ignorefile, lineno, line)
1614 )
1620 )
1615 else:
1621 else:
1616 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1622 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1617
1623
1618
1624
1619 @command(
1625 @command(
1620 b'debugindex',
1626 b'debugindex',
1621 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1627 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1622 _(b'-c|-m|FILE'),
1628 _(b'-c|-m|FILE'),
1623 )
1629 )
1624 def debugindex(ui, repo, file_=None, **opts):
1630 def debugindex(ui, repo, file_=None, **opts):
1625 """dump index data for a storage primitive"""
1631 """dump index data for a storage primitive"""
1626 opts = pycompat.byteskwargs(opts)
1632 opts = pycompat.byteskwargs(opts)
1627 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1633 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1628
1634
1629 if ui.debugflag:
1635 if ui.debugflag:
1630 shortfn = hex
1636 shortfn = hex
1631 else:
1637 else:
1632 shortfn = short
1638 shortfn = short
1633
1639
1634 idlen = 12
1640 idlen = 12
1635 for i in store:
1641 for i in store:
1636 idlen = len(shortfn(store.node(i)))
1642 idlen = len(shortfn(store.node(i)))
1637 break
1643 break
1638
1644
1639 fm = ui.formatter(b'debugindex', opts)
1645 fm = ui.formatter(b'debugindex', opts)
1640 fm.plain(
1646 fm.plain(
1641 b' rev linkrev %s %s p2\n'
1647 b' rev linkrev %s %s p2\n'
1642 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1648 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1643 )
1649 )
1644
1650
1645 for rev in store:
1651 for rev in store:
1646 node = store.node(rev)
1652 node = store.node(rev)
1647 parents = store.parents(node)
1653 parents = store.parents(node)
1648
1654
1649 fm.startitem()
1655 fm.startitem()
1650 fm.write(b'rev', b'%6d ', rev)
1656 fm.write(b'rev', b'%6d ', rev)
1651 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1657 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1652 fm.write(b'node', b'%s ', shortfn(node))
1658 fm.write(b'node', b'%s ', shortfn(node))
1653 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1659 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1654 fm.write(b'p2', b'%s', shortfn(parents[1]))
1660 fm.write(b'p2', b'%s', shortfn(parents[1]))
1655 fm.plain(b'\n')
1661 fm.plain(b'\n')
1656
1662
1657 fm.end()
1663 fm.end()
1658
1664
1659
1665
1660 @command(
1666 @command(
1661 b'debugindexdot',
1667 b'debugindexdot',
1662 cmdutil.debugrevlogopts,
1668 cmdutil.debugrevlogopts,
1663 _(b'-c|-m|FILE'),
1669 _(b'-c|-m|FILE'),
1664 optionalrepo=True,
1670 optionalrepo=True,
1665 )
1671 )
1666 def debugindexdot(ui, repo, file_=None, **opts):
1672 def debugindexdot(ui, repo, file_=None, **opts):
1667 """dump an index DAG as a graphviz dot file"""
1673 """dump an index DAG as a graphviz dot file"""
1668 opts = pycompat.byteskwargs(opts)
1674 opts = pycompat.byteskwargs(opts)
1669 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1675 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1670 ui.writenoi18n(b"digraph G {\n")
1676 ui.writenoi18n(b"digraph G {\n")
1671 for i in r:
1677 for i in r:
1672 node = r.node(i)
1678 node = r.node(i)
1673 pp = r.parents(node)
1679 pp = r.parents(node)
1674 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1680 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1675 if pp[1] != repo.nullid:
1681 if pp[1] != repo.nullid:
1676 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1682 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1677 ui.write(b"}\n")
1683 ui.write(b"}\n")
1678
1684
1679
1685
1680 @command(b'debugindexstats', [])
1686 @command(b'debugindexstats', [])
1681 def debugindexstats(ui, repo):
1687 def debugindexstats(ui, repo):
1682 """show stats related to the changelog index"""
1688 """show stats related to the changelog index"""
1683 repo.changelog.shortest(repo.nullid, 1)
1689 repo.changelog.shortest(repo.nullid, 1)
1684 index = repo.changelog.index
1690 index = repo.changelog.index
1685 if not util.safehasattr(index, b'stats'):
1691 if not util.safehasattr(index, b'stats'):
1686 raise error.Abort(_(b'debugindexstats only works with native code'))
1692 raise error.Abort(_(b'debugindexstats only works with native code'))
1687 for k, v in sorted(index.stats().items()):
1693 for k, v in sorted(index.stats().items()):
1688 ui.write(b'%s: %d\n' % (k, v))
1694 ui.write(b'%s: %d\n' % (k, v))
1689
1695
1690
1696
1691 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1697 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1692 def debuginstall(ui, **opts):
1698 def debuginstall(ui, **opts):
1693 """test Mercurial installation
1699 """test Mercurial installation
1694
1700
1695 Returns 0 on success.
1701 Returns 0 on success.
1696 """
1702 """
1697 opts = pycompat.byteskwargs(opts)
1703 opts = pycompat.byteskwargs(opts)
1698
1704
1699 problems = 0
1705 problems = 0
1700
1706
1701 fm = ui.formatter(b'debuginstall', opts)
1707 fm = ui.formatter(b'debuginstall', opts)
1702 fm.startitem()
1708 fm.startitem()
1703
1709
1704 # encoding might be unknown or wrong. don't translate these messages.
1710 # encoding might be unknown or wrong. don't translate these messages.
1705 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1711 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1706 err = None
1712 err = None
1707 try:
1713 try:
1708 codecs.lookup(pycompat.sysstr(encoding.encoding))
1714 codecs.lookup(pycompat.sysstr(encoding.encoding))
1709 except LookupError as inst:
1715 except LookupError as inst:
1710 err = stringutil.forcebytestr(inst)
1716 err = stringutil.forcebytestr(inst)
1711 problems += 1
1717 problems += 1
1712 fm.condwrite(
1718 fm.condwrite(
1713 err,
1719 err,
1714 b'encodingerror',
1720 b'encodingerror',
1715 b" %s\n (check that your locale is properly set)\n",
1721 b" %s\n (check that your locale is properly set)\n",
1716 err,
1722 err,
1717 )
1723 )
1718
1724
1719 # Python
1725 # Python
1720 pythonlib = None
1726 pythonlib = None
1721 if util.safehasattr(os, '__file__'):
1727 if util.safehasattr(os, '__file__'):
1722 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1728 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1723 elif getattr(sys, 'oxidized', False):
1729 elif getattr(sys, 'oxidized', False):
1724 pythonlib = pycompat.sysexecutable
1730 pythonlib = pycompat.sysexecutable
1725
1731
1726 fm.write(
1732 fm.write(
1727 b'pythonexe',
1733 b'pythonexe',
1728 _(b"checking Python executable (%s)\n"),
1734 _(b"checking Python executable (%s)\n"),
1729 pycompat.sysexecutable or _(b"unknown"),
1735 pycompat.sysexecutable or _(b"unknown"),
1730 )
1736 )
1731 fm.write(
1737 fm.write(
1732 b'pythonimplementation',
1738 b'pythonimplementation',
1733 _(b"checking Python implementation (%s)\n"),
1739 _(b"checking Python implementation (%s)\n"),
1734 pycompat.sysbytes(platform.python_implementation()),
1740 pycompat.sysbytes(platform.python_implementation()),
1735 )
1741 )
1736 fm.write(
1742 fm.write(
1737 b'pythonver',
1743 b'pythonver',
1738 _(b"checking Python version (%s)\n"),
1744 _(b"checking Python version (%s)\n"),
1739 (b"%d.%d.%d" % sys.version_info[:3]),
1745 (b"%d.%d.%d" % sys.version_info[:3]),
1740 )
1746 )
1741 fm.write(
1747 fm.write(
1742 b'pythonlib',
1748 b'pythonlib',
1743 _(b"checking Python lib (%s)...\n"),
1749 _(b"checking Python lib (%s)...\n"),
1744 pythonlib or _(b"unknown"),
1750 pythonlib or _(b"unknown"),
1745 )
1751 )
1746
1752
1747 try:
1753 try:
1748 from . import rustext # pytype: disable=import-error
1754 from . import rustext # pytype: disable=import-error
1749
1755
1750 rustext.__doc__ # trigger lazy import
1756 rustext.__doc__ # trigger lazy import
1751 except ImportError:
1757 except ImportError:
1752 rustext = None
1758 rustext = None
1753
1759
1754 security = set(sslutil.supportedprotocols)
1760 security = set(sslutil.supportedprotocols)
1755 if sslutil.hassni:
1761 if sslutil.hassni:
1756 security.add(b'sni')
1762 security.add(b'sni')
1757
1763
1758 fm.write(
1764 fm.write(
1759 b'pythonsecurity',
1765 b'pythonsecurity',
1760 _(b"checking Python security support (%s)\n"),
1766 _(b"checking Python security support (%s)\n"),
1761 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1767 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1762 )
1768 )
1763
1769
1764 # These are warnings, not errors. So don't increment problem count. This
1770 # These are warnings, not errors. So don't increment problem count. This
1765 # may change in the future.
1771 # may change in the future.
1766 if b'tls1.2' not in security:
1772 if b'tls1.2' not in security:
1767 fm.plain(
1773 fm.plain(
1768 _(
1774 _(
1769 b' TLS 1.2 not supported by Python install; '
1775 b' TLS 1.2 not supported by Python install; '
1770 b'network connections lack modern security\n'
1776 b'network connections lack modern security\n'
1771 )
1777 )
1772 )
1778 )
1773 if b'sni' not in security:
1779 if b'sni' not in security:
1774 fm.plain(
1780 fm.plain(
1775 _(
1781 _(
1776 b' SNI not supported by Python install; may have '
1782 b' SNI not supported by Python install; may have '
1777 b'connectivity issues with some servers\n'
1783 b'connectivity issues with some servers\n'
1778 )
1784 )
1779 )
1785 )
1780
1786
1781 fm.plain(
1787 fm.plain(
1782 _(
1788 _(
1783 b"checking Rust extensions (%s)\n"
1789 b"checking Rust extensions (%s)\n"
1784 % (b'missing' if rustext is None else b'installed')
1790 % (b'missing' if rustext is None else b'installed')
1785 ),
1791 ),
1786 )
1792 )
1787
1793
1788 # TODO print CA cert info
1794 # TODO print CA cert info
1789
1795
1790 # hg version
1796 # hg version
1791 hgver = util.version()
1797 hgver = util.version()
1792 fm.write(
1798 fm.write(
1793 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1799 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1794 )
1800 )
1795 fm.write(
1801 fm.write(
1796 b'hgverextra',
1802 b'hgverextra',
1797 _(b"checking Mercurial custom build (%s)\n"),
1803 _(b"checking Mercurial custom build (%s)\n"),
1798 b'+'.join(hgver.split(b'+')[1:]),
1804 b'+'.join(hgver.split(b'+')[1:]),
1799 )
1805 )
1800
1806
1801 # compiled modules
1807 # compiled modules
1802 hgmodules = None
1808 hgmodules = None
1803 if util.safehasattr(sys.modules[__name__], '__file__'):
1809 if util.safehasattr(sys.modules[__name__], '__file__'):
1804 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1810 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1805 elif getattr(sys, 'oxidized', False):
1811 elif getattr(sys, 'oxidized', False):
1806 hgmodules = pycompat.sysexecutable
1812 hgmodules = pycompat.sysexecutable
1807
1813
1808 fm.write(
1814 fm.write(
1809 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1815 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1810 )
1816 )
1811 fm.write(
1817 fm.write(
1812 b'hgmodules',
1818 b'hgmodules',
1813 _(b"checking installed modules (%s)...\n"),
1819 _(b"checking installed modules (%s)...\n"),
1814 hgmodules or _(b"unknown"),
1820 hgmodules or _(b"unknown"),
1815 )
1821 )
1816
1822
1817 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1823 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1818 rustext = rustandc # for now, that's the only case
1824 rustext = rustandc # for now, that's the only case
1819 cext = policy.policy in (b'c', b'allow') or rustandc
1825 cext = policy.policy in (b'c', b'allow') or rustandc
1820 nopure = cext or rustext
1826 nopure = cext or rustext
1821 if nopure:
1827 if nopure:
1822 err = None
1828 err = None
1823 try:
1829 try:
1824 if cext:
1830 if cext:
1825 from .cext import ( # pytype: disable=import-error
1831 from .cext import ( # pytype: disable=import-error
1826 base85,
1832 base85,
1827 bdiff,
1833 bdiff,
1828 mpatch,
1834 mpatch,
1829 osutil,
1835 osutil,
1830 )
1836 )
1831
1837
1832 # quiet pyflakes
1838 # quiet pyflakes
1833 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1839 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1834 if rustext:
1840 if rustext:
1835 from .rustext import ( # pytype: disable=import-error
1841 from .rustext import ( # pytype: disable=import-error
1836 ancestor,
1842 ancestor,
1837 dirstate,
1843 dirstate,
1838 )
1844 )
1839
1845
1840 dir(ancestor), dir(dirstate) # quiet pyflakes
1846 dir(ancestor), dir(dirstate) # quiet pyflakes
1841 except Exception as inst:
1847 except Exception as inst:
1842 err = stringutil.forcebytestr(inst)
1848 err = stringutil.forcebytestr(inst)
1843 problems += 1
1849 problems += 1
1844 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1850 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1845
1851
1846 compengines = util.compengines._engines.values()
1852 compengines = util.compengines._engines.values()
1847 fm.write(
1853 fm.write(
1848 b'compengines',
1854 b'compengines',
1849 _(b'checking registered compression engines (%s)\n'),
1855 _(b'checking registered compression engines (%s)\n'),
1850 fm.formatlist(
1856 fm.formatlist(
1851 sorted(e.name() for e in compengines),
1857 sorted(e.name() for e in compengines),
1852 name=b'compengine',
1858 name=b'compengine',
1853 fmt=b'%s',
1859 fmt=b'%s',
1854 sep=b', ',
1860 sep=b', ',
1855 ),
1861 ),
1856 )
1862 )
1857 fm.write(
1863 fm.write(
1858 b'compenginesavail',
1864 b'compenginesavail',
1859 _(b'checking available compression engines (%s)\n'),
1865 _(b'checking available compression engines (%s)\n'),
1860 fm.formatlist(
1866 fm.formatlist(
1861 sorted(e.name() for e in compengines if e.available()),
1867 sorted(e.name() for e in compengines if e.available()),
1862 name=b'compengine',
1868 name=b'compengine',
1863 fmt=b'%s',
1869 fmt=b'%s',
1864 sep=b', ',
1870 sep=b', ',
1865 ),
1871 ),
1866 )
1872 )
1867 wirecompengines = compression.compengines.supportedwireengines(
1873 wirecompengines = compression.compengines.supportedwireengines(
1868 compression.SERVERROLE
1874 compression.SERVERROLE
1869 )
1875 )
1870 fm.write(
1876 fm.write(
1871 b'compenginesserver',
1877 b'compenginesserver',
1872 _(
1878 _(
1873 b'checking available compression engines '
1879 b'checking available compression engines '
1874 b'for wire protocol (%s)\n'
1880 b'for wire protocol (%s)\n'
1875 ),
1881 ),
1876 fm.formatlist(
1882 fm.formatlist(
1877 [e.name() for e in wirecompengines if e.wireprotosupport()],
1883 [e.name() for e in wirecompengines if e.wireprotosupport()],
1878 name=b'compengine',
1884 name=b'compengine',
1879 fmt=b'%s',
1885 fmt=b'%s',
1880 sep=b', ',
1886 sep=b', ',
1881 ),
1887 ),
1882 )
1888 )
1883 re2 = b'missing'
1889 re2 = b'missing'
1884 if util._re2:
1890 if util._re2:
1885 re2 = b'available'
1891 re2 = b'available'
1886 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1892 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1887 fm.data(re2=bool(util._re2))
1893 fm.data(re2=bool(util._re2))
1888
1894
1889 # templates
1895 # templates
1890 p = templater.templatedir()
1896 p = templater.templatedir()
1891 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1897 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1892 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1898 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1893 if p:
1899 if p:
1894 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1900 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1895 if m:
1901 if m:
1896 # template found, check if it is working
1902 # template found, check if it is working
1897 err = None
1903 err = None
1898 try:
1904 try:
1899 templater.templater.frommapfile(m)
1905 templater.templater.frommapfile(m)
1900 except Exception as inst:
1906 except Exception as inst:
1901 err = stringutil.forcebytestr(inst)
1907 err = stringutil.forcebytestr(inst)
1902 p = None
1908 p = None
1903 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1909 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1904 else:
1910 else:
1905 p = None
1911 p = None
1906 fm.condwrite(
1912 fm.condwrite(
1907 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1913 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1908 )
1914 )
1909 fm.condwrite(
1915 fm.condwrite(
1910 not m,
1916 not m,
1911 b'defaulttemplatenotfound',
1917 b'defaulttemplatenotfound',
1912 _(b" template '%s' not found\n"),
1918 _(b" template '%s' not found\n"),
1913 b"default",
1919 b"default",
1914 )
1920 )
1915 if not p:
1921 if not p:
1916 problems += 1
1922 problems += 1
1917 fm.condwrite(
1923 fm.condwrite(
1918 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1924 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1919 )
1925 )
1920
1926
1921 # editor
1927 # editor
1922 editor = ui.geteditor()
1928 editor = ui.geteditor()
1923 editor = util.expandpath(editor)
1929 editor = util.expandpath(editor)
1924 editorbin = procutil.shellsplit(editor)[0]
1930 editorbin = procutil.shellsplit(editor)[0]
1925 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1931 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1926 cmdpath = procutil.findexe(editorbin)
1932 cmdpath = procutil.findexe(editorbin)
1927 fm.condwrite(
1933 fm.condwrite(
1928 not cmdpath and editor == b'vi',
1934 not cmdpath and editor == b'vi',
1929 b'vinotfound',
1935 b'vinotfound',
1930 _(
1936 _(
1931 b" No commit editor set and can't find %s in PATH\n"
1937 b" No commit editor set and can't find %s in PATH\n"
1932 b" (specify a commit editor in your configuration"
1938 b" (specify a commit editor in your configuration"
1933 b" file)\n"
1939 b" file)\n"
1934 ),
1940 ),
1935 not cmdpath and editor == b'vi' and editorbin,
1941 not cmdpath and editor == b'vi' and editorbin,
1936 )
1942 )
1937 fm.condwrite(
1943 fm.condwrite(
1938 not cmdpath and editor != b'vi',
1944 not cmdpath and editor != b'vi',
1939 b'editornotfound',
1945 b'editornotfound',
1940 _(
1946 _(
1941 b" Can't find editor '%s' in PATH\n"
1947 b" Can't find editor '%s' in PATH\n"
1942 b" (specify a commit editor in your configuration"
1948 b" (specify a commit editor in your configuration"
1943 b" file)\n"
1949 b" file)\n"
1944 ),
1950 ),
1945 not cmdpath and editorbin,
1951 not cmdpath and editorbin,
1946 )
1952 )
1947 if not cmdpath and editor != b'vi':
1953 if not cmdpath and editor != b'vi':
1948 problems += 1
1954 problems += 1
1949
1955
1950 # check username
1956 # check username
1951 username = None
1957 username = None
1952 err = None
1958 err = None
1953 try:
1959 try:
1954 username = ui.username()
1960 username = ui.username()
1955 except error.Abort as e:
1961 except error.Abort as e:
1956 err = e.message
1962 err = e.message
1957 problems += 1
1963 problems += 1
1958
1964
1959 fm.condwrite(
1965 fm.condwrite(
1960 username, b'username', _(b"checking username (%s)\n"), username
1966 username, b'username', _(b"checking username (%s)\n"), username
1961 )
1967 )
1962 fm.condwrite(
1968 fm.condwrite(
1963 err,
1969 err,
1964 b'usernameerror',
1970 b'usernameerror',
1965 _(
1971 _(
1966 b"checking username...\n %s\n"
1972 b"checking username...\n %s\n"
1967 b" (specify a username in your configuration file)\n"
1973 b" (specify a username in your configuration file)\n"
1968 ),
1974 ),
1969 err,
1975 err,
1970 )
1976 )
1971
1977
1972 for name, mod in extensions.extensions():
1978 for name, mod in extensions.extensions():
1973 handler = getattr(mod, 'debuginstall', None)
1979 handler = getattr(mod, 'debuginstall', None)
1974 if handler is not None:
1980 if handler is not None:
1975 problems += handler(ui, fm)
1981 problems += handler(ui, fm)
1976
1982
1977 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1983 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1978 if not problems:
1984 if not problems:
1979 fm.data(problems=problems)
1985 fm.data(problems=problems)
1980 fm.condwrite(
1986 fm.condwrite(
1981 problems,
1987 problems,
1982 b'problems',
1988 b'problems',
1983 _(b"%d problems detected, please check your install!\n"),
1989 _(b"%d problems detected, please check your install!\n"),
1984 problems,
1990 problems,
1985 )
1991 )
1986 fm.end()
1992 fm.end()
1987
1993
1988 return problems
1994 return problems
1989
1995
1990
1996
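# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: a hedged, stand-alone sketch
# of the editor check performed above.  It reduces to "split the configured
# command, keep the executable, and look it up on PATH".  The function name
# and arguments below are illustrative only and use just the standard
# library, not Mercurial's procutil helpers.
def _sketch_find_editor_problem(editor_cmd):
    """Return a problem description, or None if the editor seems usable."""
    import shlex
    import shutil

    # keep only the executable part of e.g. "vim -f" or "code --wait"
    editorbin = shlex.split(editor_cmd)[0]
    if shutil.which(editorbin) is None:
        return "editor %r not found in PATH" % (editorbin,)
    return None
# ---------------------------------------------------------------------------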
1991 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1997 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1992 def debugknown(ui, repopath, *ids, **opts):
1998 def debugknown(ui, repopath, *ids, **opts):
1993 """test whether node ids are known to a repo
1999 """test whether node ids are known to a repo
1994
2000
1995 Every ID must be a full-length hex node id string. Returns a list of 0s
2001 Every ID must be a full-length hex node id string. Returns a list of 0s
1996 and 1s indicating unknown/known.
2002 and 1s indicating unknown/known.
1997 """
2003 """
1998 opts = pycompat.byteskwargs(opts)
2004 opts = pycompat.byteskwargs(opts)
1999 repo = hg.peer(ui, opts, repopath)
2005 repo = hg.peer(ui, opts, repopath)
2000 if not repo.capable(b'known'):
2006 if not repo.capable(b'known'):
2001 raise error.Abort(b"known() not supported by target repository")
2007 raise error.Abort(b"known() not supported by target repository")
2002 flags = repo.known([bin(s) for s in ids])
2008 flags = repo.known([bin(s) for s in ids])
2003 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2009 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2004
2010
2005
2011
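# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: debugknown prints the peer's
# answer as a compact string of 0s and 1s.  A hedged sketch of just that
# formatting step; `flags` stands in for the list of booleans returned by
# the "known" wire protocol command and is not a real Mercurial object.
def _sketch_format_known_flags(flags):
    """Turn [True, False, True] into the b'101' style output used above."""
    return b"".join(b"1" if f else b"0" for f in flags)

# e.g. _sketch_format_known_flags([True, False]) == b"10"
# ---------------------------------------------------------------------------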
2006 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2012 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2007 def debuglabelcomplete(ui, repo, *args):
2013 def debuglabelcomplete(ui, repo, *args):
2008 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2014 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2009 debugnamecomplete(ui, repo, *args)
2015 debugnamecomplete(ui, repo, *args)
2010
2016
2011
2017
2012 @command(
2018 @command(
2013 b'debuglocks',
2019 b'debuglocks',
2014 [
2020 [
2015 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2021 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2016 (
2022 (
2017 b'W',
2023 b'W',
2018 b'force-free-wlock',
2024 b'force-free-wlock',
2019 None,
2025 None,
2020 _(b'free the working state lock (DANGEROUS)'),
2026 _(b'free the working state lock (DANGEROUS)'),
2021 ),
2027 ),
2022 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2028 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2023 (
2029 (
2024 b'S',
2030 b'S',
2025 b'set-wlock',
2031 b'set-wlock',
2026 None,
2032 None,
2027 _(b'set the working state lock until stopped'),
2033 _(b'set the working state lock until stopped'),
2028 ),
2034 ),
2029 ],
2035 ],
2030 _(b'[OPTION]...'),
2036 _(b'[OPTION]...'),
2031 )
2037 )
2032 def debuglocks(ui, repo, **opts):
2038 def debuglocks(ui, repo, **opts):
2033 """show or modify state of locks
2039 """show or modify state of locks
2034
2040
2035 By default, this command will show which locks are held. This
2041 By default, this command will show which locks are held. This
2036 includes the user and process holding the lock, the amount of time
2042 includes the user and process holding the lock, the amount of time
2037 the lock has been held, and the machine name where the process is
2043 the lock has been held, and the machine name where the process is
2038 running if it's not local.
2044 running if it's not local.
2039
2045
2040 Locks protect the integrity of Mercurial's data, so they should be
2046 Locks protect the integrity of Mercurial's data, so they should be
2041 treated with care. System crashes or other interruptions may cause
2047 treated with care. System crashes or other interruptions may cause
2042 locks to not be properly released, though Mercurial will usually
2048 locks to not be properly released, though Mercurial will usually
2043 detect and remove such stale locks automatically.
2049 detect and remove such stale locks automatically.
2044
2050
2045 However, detecting stale locks may not always be possible (for
2051 However, detecting stale locks may not always be possible (for
2046 instance, on a shared filesystem). Removing locks may also be
2052 instance, on a shared filesystem). Removing locks may also be
2047 blocked by filesystem permissions.
2053 blocked by filesystem permissions.
2048
2054
2049 Setting a lock will prevent other commands from changing the data.
2055 Setting a lock will prevent other commands from changing the data.
2050 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2056 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2051 The set locks are removed when the command exits.
2057 The set locks are removed when the command exits.
2052
2058
2053 Returns 0 if no locks are held.
2059 Returns 0 if no locks are held.
2054
2060
2055 """
2061 """
2056
2062
2057 if opts.get('force_free_lock'):
2063 if opts.get('force_free_lock'):
2058 repo.svfs.unlink(b'lock')
2064 repo.svfs.unlink(b'lock')
2059 if opts.get('force_free_wlock'):
2065 if opts.get('force_free_wlock'):
2060 repo.vfs.unlink(b'wlock')
2066 repo.vfs.unlink(b'wlock')
2061 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2067 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2062 return 0
2068 return 0
2063
2069
2064 locks = []
2070 locks = []
2065 try:
2071 try:
2066 if opts.get('set_wlock'):
2072 if opts.get('set_wlock'):
2067 try:
2073 try:
2068 locks.append(repo.wlock(False))
2074 locks.append(repo.wlock(False))
2069 except error.LockHeld:
2075 except error.LockHeld:
2070 raise error.Abort(_(b'wlock is already held'))
2076 raise error.Abort(_(b'wlock is already held'))
2071 if opts.get('set_lock'):
2077 if opts.get('set_lock'):
2072 try:
2078 try:
2073 locks.append(repo.lock(False))
2079 locks.append(repo.lock(False))
2074 except error.LockHeld:
2080 except error.LockHeld:
2075 raise error.Abort(_(b'lock is already held'))
2081 raise error.Abort(_(b'lock is already held'))
2076 if len(locks):
2082 if len(locks):
2077 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2083 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2078 return 0
2084 return 0
2079 finally:
2085 finally:
2080 release(*locks)
2086 release(*locks)
2081
2087
2082 now = time.time()
2088 now = time.time()
2083 held = 0
2089 held = 0
2084
2090
2085 def report(vfs, name, method):
2091 def report(vfs, name, method):
2086 # this causes stale locks to get reaped for more accurate reporting
2092 # this causes stale locks to get reaped for more accurate reporting
2087 try:
2093 try:
2088 l = method(False)
2094 l = method(False)
2089 except error.LockHeld:
2095 except error.LockHeld:
2090 l = None
2096 l = None
2091
2097
2092 if l:
2098 if l:
2093 l.release()
2099 l.release()
2094 else:
2100 else:
2095 try:
2101 try:
2096 st = vfs.lstat(name)
2102 st = vfs.lstat(name)
2097 age = now - st[stat.ST_MTIME]
2103 age = now - st[stat.ST_MTIME]
2098 user = util.username(st.st_uid)
2104 user = util.username(st.st_uid)
2099 locker = vfs.readlock(name)
2105 locker = vfs.readlock(name)
2100 if b":" in locker:
2106 if b":" in locker:
2101 host, pid = locker.split(b':')
2107 host, pid = locker.split(b':')
2102 if host == socket.gethostname():
2108 if host == socket.gethostname():
2103 locker = b'user %s, process %s' % (user or b'None', pid)
2109 locker = b'user %s, process %s' % (user or b'None', pid)
2104 else:
2110 else:
2105 locker = b'user %s, process %s, host %s' % (
2111 locker = b'user %s, process %s, host %s' % (
2106 user or b'None',
2112 user or b'None',
2107 pid,
2113 pid,
2108 host,
2114 host,
2109 )
2115 )
2110 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2116 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2111 return 1
2117 return 1
2112 except OSError as e:
2118 except OSError as e:
2113 if e.errno != errno.ENOENT:
2119 if e.errno != errno.ENOENT:
2114 raise
2120 raise
2115
2121
2116 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2122 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2117 return 0
2123 return 0
2118
2124
2119 held += report(repo.svfs, b"lock", repo.lock)
2125 held += report(repo.svfs, b"lock", repo.lock)
2120 held += report(repo.vfs, b"wlock", repo.wlock)
2126 held += report(repo.vfs, b"wlock", repo.wlock)
2121
2127
2122 return held
2128 return held
2123
2129
2124
2130
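# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the lock report above renders
# a b'host:pid' locker string differently depending on whether the lock was
# taken on the local machine.  A hedged sketch of that parsing/formatting
# idea in isolation; the helper name is illustrative and not a Mercurial API.
def _sketch_describe_locker(locker, user=None):
    """Render b'host:pid' roughly like the debuglocks output above."""
    import socket

    host, pid = locker.split(b":", 1)
    localhost = socket.gethostname().encode("utf-8", "replace")
    if host == localhost:
        return b"user %s, process %s" % (user or b"None", pid)
    return b"user %s, process %s, host %s" % (user or b"None", pid, host)
# ---------------------------------------------------------------------------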
2125 @command(
2131 @command(
2126 b'debugmanifestfulltextcache',
2132 b'debugmanifestfulltextcache',
2127 [
2133 [
2128 (b'', b'clear', False, _(b'clear the cache')),
2134 (b'', b'clear', False, _(b'clear the cache')),
2129 (
2135 (
2130 b'a',
2136 b'a',
2131 b'add',
2137 b'add',
2132 [],
2138 [],
2133 _(b'add the given manifest nodes to the cache'),
2139 _(b'add the given manifest nodes to the cache'),
2134 _(b'NODE'),
2140 _(b'NODE'),
2135 ),
2141 ),
2136 ],
2142 ],
2137 b'',
2143 b'',
2138 )
2144 )
2139 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2145 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2140 """show, clear or amend the contents of the manifest fulltext cache"""
2146 """show, clear or amend the contents of the manifest fulltext cache"""
2141
2147
2142 def getcache():
2148 def getcache():
2143 r = repo.manifestlog.getstorage(b'')
2149 r = repo.manifestlog.getstorage(b'')
2144 try:
2150 try:
2145 return r._fulltextcache
2151 return r._fulltextcache
2146 except AttributeError:
2152 except AttributeError:
2147 msg = _(
2153 msg = _(
2148 b"Current revlog implementation doesn't appear to have a "
2154 b"Current revlog implementation doesn't appear to have a "
2149 b"manifest fulltext cache\n"
2155 b"manifest fulltext cache\n"
2150 )
2156 )
2151 raise error.Abort(msg)
2157 raise error.Abort(msg)
2152
2158
2153 if opts.get('clear'):
2159 if opts.get('clear'):
2154 with repo.wlock():
2160 with repo.wlock():
2155 cache = getcache()
2161 cache = getcache()
2156 cache.clear(clear_persisted_data=True)
2162 cache.clear(clear_persisted_data=True)
2157 return
2163 return
2158
2164
2159 if add:
2165 if add:
2160 with repo.wlock():
2166 with repo.wlock():
2161 m = repo.manifestlog
2167 m = repo.manifestlog
2162 store = m.getstorage(b'')
2168 store = m.getstorage(b'')
2163 for n in add:
2169 for n in add:
2164 try:
2170 try:
2165 manifest = m[store.lookup(n)]
2171 manifest = m[store.lookup(n)]
2166 except error.LookupError as e:
2172 except error.LookupError as e:
2167 raise error.Abort(
2173 raise error.Abort(
2168 bytes(e), hint=b"Check your manifest node id"
2174 bytes(e), hint=b"Check your manifest node id"
2169 )
2175 )
2170 manifest.read() # stores revision in cache too
2176 manifest.read() # stores revision in cache too
2171 return
2177 return
2172
2178
2173 cache = getcache()
2179 cache = getcache()
2174 if not len(cache):
2180 if not len(cache):
2175 ui.write(_(b'cache empty\n'))
2181 ui.write(_(b'cache empty\n'))
2176 else:
2182 else:
2177 ui.write(
2183 ui.write(
2178 _(
2184 _(
2179 b'cache contains %d manifest entries, in order of most to '
2185 b'cache contains %d manifest entries, in order of most to '
2180 b'least recent:\n'
2186 b'least recent:\n'
2181 )
2187 )
2182 % (len(cache),)
2188 % (len(cache),)
2183 )
2189 )
2184 totalsize = 0
2190 totalsize = 0
2185 for nodeid in cache:
2191 for nodeid in cache:
2186 # Use cache.peek so the LRU order is not updated
2192 # Use cache.peek so the LRU order is not updated
2187 data = cache.peek(nodeid)
2193 data = cache.peek(nodeid)
2188 size = len(data)
2194 size = len(data)
2189 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2195 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2190 ui.write(
2196 ui.write(
2191 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2197 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2192 )
2198 )
2193 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2199 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2194 ui.write(
2200 ui.write(
2195 _(b'total cache data size %s, on-disk %s\n')
2201 _(b'total cache data size %s, on-disk %s\n')
2196 % (util.bytecount(totalsize), util.bytecount(ondisk))
2202 % (util.bytecount(totalsize), util.bytecount(ondisk))
2197 )
2203 )
2198
2204
2199
2205
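# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the size report above charges
# every cache entry its data length plus a fixed 24-byte overhead (20-byte
# nodeid plus a 4-byte length field).  A hedged sketch of the same accounting
# over a plain dict; names are illustrative only.
def _sketch_cache_report_size(entries):
    """entries: mapping of 20-byte nodeid -> manifest text (bytes)."""
    NODEID_LEN = 20
    SIZE_FIELD_LEN = 4
    overhead = NODEID_LEN + SIZE_FIELD_LEN
    return sum(len(data) + overhead for data in entries.values())
# ---------------------------------------------------------------------------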
2200 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2206 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2201 def debugmergestate(ui, repo, *args, **opts):
2207 def debugmergestate(ui, repo, *args, **opts):
2202 """print merge state
2208 """print merge state
2203
2209
2204 Use --verbose to print out information about whether v1 or v2 merge state
2210 Use --verbose to print out information about whether v1 or v2 merge state
2205 was chosen."""
2211 was chosen."""
2206
2212
2207 if ui.verbose:
2213 if ui.verbose:
2208 ms = mergestatemod.mergestate(repo)
2214 ms = mergestatemod.mergestate(repo)
2209
2215
2210 # sort so that reasonable information is on top
2216 # sort so that reasonable information is on top
2211 v1records = ms._readrecordsv1()
2217 v1records = ms._readrecordsv1()
2212 v2records = ms._readrecordsv2()
2218 v2records = ms._readrecordsv2()
2213
2219
2214 if not v1records and not v2records:
2220 if not v1records and not v2records:
2215 pass
2221 pass
2216 elif not v2records:
2222 elif not v2records:
2217 ui.writenoi18n(b'no version 2 merge state\n')
2223 ui.writenoi18n(b'no version 2 merge state\n')
2218 elif ms._v1v2match(v1records, v2records):
2224 elif ms._v1v2match(v1records, v2records):
2219 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2225 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2220 else:
2226 else:
2221 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2227 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2222
2228
2223 opts = pycompat.byteskwargs(opts)
2229 opts = pycompat.byteskwargs(opts)
2224 if not opts[b'template']:
2230 if not opts[b'template']:
2225 opts[b'template'] = (
2231 opts[b'template'] = (
2226 b'{if(commits, "", "no merge state found\n")}'
2232 b'{if(commits, "", "no merge state found\n")}'
2227 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2233 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2228 b'{files % "file: {path} (state \\"{state}\\")\n'
2234 b'{files % "file: {path} (state \\"{state}\\")\n'
2229 b'{if(local_path, "'
2235 b'{if(local_path, "'
2230 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2236 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2231 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2237 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2232 b' other path: {other_path} (node {other_node})\n'
2238 b' other path: {other_path} (node {other_node})\n'
2233 b'")}'
2239 b'")}'
2234 b'{if(rename_side, "'
2240 b'{if(rename_side, "'
2235 b' rename side: {rename_side}\n'
2241 b' rename side: {rename_side}\n'
2236 b' renamed path: {renamed_path}\n'
2242 b' renamed path: {renamed_path}\n'
2237 b'")}'
2243 b'")}'
2238 b'{extras % " extra: {key} = {value}\n"}'
2244 b'{extras % " extra: {key} = {value}\n"}'
2239 b'"}'
2245 b'"}'
2240 b'{extras % "extra: {file} ({key} = {value})\n"}'
2246 b'{extras % "extra: {file} ({key} = {value})\n"}'
2241 )
2247 )
2242
2248
2243 ms = mergestatemod.mergestate.read(repo)
2249 ms = mergestatemod.mergestate.read(repo)
2244
2250
2245 fm = ui.formatter(b'debugmergestate', opts)
2251 fm = ui.formatter(b'debugmergestate', opts)
2246 fm.startitem()
2252 fm.startitem()
2247
2253
2248 fm_commits = fm.nested(b'commits')
2254 fm_commits = fm.nested(b'commits')
2249 if ms.active():
2255 if ms.active():
2250 for name, node, label_index in (
2256 for name, node, label_index in (
2251 (b'local', ms.local, 0),
2257 (b'local', ms.local, 0),
2252 (b'other', ms.other, 1),
2258 (b'other', ms.other, 1),
2253 ):
2259 ):
2254 fm_commits.startitem()
2260 fm_commits.startitem()
2255 fm_commits.data(name=name)
2261 fm_commits.data(name=name)
2256 fm_commits.data(node=hex(node))
2262 fm_commits.data(node=hex(node))
2257 if ms._labels and len(ms._labels) > label_index:
2263 if ms._labels and len(ms._labels) > label_index:
2258 fm_commits.data(label=ms._labels[label_index])
2264 fm_commits.data(label=ms._labels[label_index])
2259 fm_commits.end()
2265 fm_commits.end()
2260
2266
2261 fm_files = fm.nested(b'files')
2267 fm_files = fm.nested(b'files')
2262 if ms.active():
2268 if ms.active():
2263 for f in ms:
2269 for f in ms:
2264 fm_files.startitem()
2270 fm_files.startitem()
2265 fm_files.data(path=f)
2271 fm_files.data(path=f)
2266 state = ms._state[f]
2272 state = ms._state[f]
2267 fm_files.data(state=state[0])
2273 fm_files.data(state=state[0])
2268 if state[0] in (
2274 if state[0] in (
2269 mergestatemod.MERGE_RECORD_UNRESOLVED,
2275 mergestatemod.MERGE_RECORD_UNRESOLVED,
2270 mergestatemod.MERGE_RECORD_RESOLVED,
2276 mergestatemod.MERGE_RECORD_RESOLVED,
2271 ):
2277 ):
2272 fm_files.data(local_key=state[1])
2278 fm_files.data(local_key=state[1])
2273 fm_files.data(local_path=state[2])
2279 fm_files.data(local_path=state[2])
2274 fm_files.data(ancestor_path=state[3])
2280 fm_files.data(ancestor_path=state[3])
2275 fm_files.data(ancestor_node=state[4])
2281 fm_files.data(ancestor_node=state[4])
2276 fm_files.data(other_path=state[5])
2282 fm_files.data(other_path=state[5])
2277 fm_files.data(other_node=state[6])
2283 fm_files.data(other_node=state[6])
2278 fm_files.data(local_flags=state[7])
2284 fm_files.data(local_flags=state[7])
2279 elif state[0] in (
2285 elif state[0] in (
2280 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2286 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2281 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2287 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2282 ):
2288 ):
2283 fm_files.data(renamed_path=state[1])
2289 fm_files.data(renamed_path=state[1])
2284 fm_files.data(rename_side=state[2])
2290 fm_files.data(rename_side=state[2])
2285 fm_extras = fm_files.nested(b'extras')
2291 fm_extras = fm_files.nested(b'extras')
2286 for k, v in sorted(ms.extras(f).items()):
2292 for k, v in sorted(ms.extras(f).items()):
2287 fm_extras.startitem()
2293 fm_extras.startitem()
2288 fm_extras.data(key=k)
2294 fm_extras.data(key=k)
2289 fm_extras.data(value=v)
2295 fm_extras.data(value=v)
2290 fm_extras.end()
2296 fm_extras.end()
2291
2297
2292 fm_files.end()
2298 fm_files.end()
2293
2299
2294 fm_extras = fm.nested(b'extras')
2300 fm_extras = fm.nested(b'extras')
2295 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2301 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2296 if f in ms:
2302 if f in ms:
2297 # If the file is in the mergestate, we have already processed its extras
2303 # If the file is in the mergestate, we have already processed its extras
2298 continue
2304 continue
2299 for k, v in pycompat.iteritems(d):
2305 for k, v in pycompat.iteritems(d):
2300 fm_extras.startitem()
2306 fm_extras.startitem()
2301 fm_extras.data(file=f)
2307 fm_extras.data(file=f)
2302 fm_extras.data(key=k)
2308 fm_extras.data(key=k)
2303 fm_extras.data(value=v)
2309 fm_extras.data(value=v)
2304 fm_extras.end()
2310 fm_extras.end()
2305
2311
2306 fm.end()
2312 fm.end()
2307
2313
2308
2314
2309 @command(b'debugnamecomplete', [], _(b'NAME...'))
2315 @command(b'debugnamecomplete', [], _(b'NAME...'))
2310 def debugnamecomplete(ui, repo, *args):
2316 def debugnamecomplete(ui, repo, *args):
2311 '''complete "names" - tags, open branch names, bookmark names'''
2317 '''complete "names" - tags, open branch names, bookmark names'''
2312
2318
2313 names = set()
2319 names = set()
2314 # since we previously only listed open branches, we will handle that
2320 # since we previously only listed open branches, we will handle that
2315 # specially (after this for loop)
2321 # specially (after this for loop)
2316 for name, ns in pycompat.iteritems(repo.names):
2322 for name, ns in pycompat.iteritems(repo.names):
2317 if name != b'branches':
2323 if name != b'branches':
2318 names.update(ns.listnames(repo))
2324 names.update(ns.listnames(repo))
2319 names.update(
2325 names.update(
2320 tag
2326 tag
2321 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2327 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2322 if not closed
2328 if not closed
2323 )
2329 )
2324 completions = set()
2330 completions = set()
2325 if not args:
2331 if not args:
2326 args = [b'']
2332 args = [b'']
2327 for a in args:
2333 for a in args:
2328 completions.update(n for n in names if n.startswith(a))
2334 completions.update(n for n in names if n.startswith(a))
2329 ui.write(b'\n'.join(sorted(completions)))
2335 ui.write(b'\n'.join(sorted(completions)))
2330 ui.write(b'\n')
2336 ui.write(b'\n')
2331
2337
2332
2338
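# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: name completion above is
# plain prefix matching over the union of tags, bookmarks and open branch
# names.  A hedged sketch of the core matching step, independent of any
# repository; the helper name is illustrative only.
def _sketch_complete_names(names, prefixes):
    """Return the sorted subset of `names` starting with any given prefix."""
    prefixes = prefixes or [b""]  # no argument means "complete everything"
    return sorted(n for n in names if any(n.startswith(p) for p in prefixes))
# ---------------------------------------------------------------------------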
2333 @command(
2339 @command(
2334 b'debugnodemap',
2340 b'debugnodemap',
2335 [
2341 [
2336 (
2342 (
2337 b'',
2343 b'',
2338 b'dump-new',
2344 b'dump-new',
2339 False,
2345 False,
2340 _(b'write a (new) persistent binary nodemap on stdout'),
2346 _(b'write a (new) persistent binary nodemap on stdout'),
2341 ),
2347 ),
2342 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2348 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2343 (
2349 (
2344 b'',
2350 b'',
2345 b'check',
2351 b'check',
2346 False,
2352 False,
2347 _(b'check that the data on disk are correct.'),
2353 _(b'check that the data on disk are correct.'),
2348 ),
2354 ),
2349 (
2355 (
2350 b'',
2356 b'',
2351 b'metadata',
2357 b'metadata',
2352 False,
2358 False,
2353 _(b'display the on-disk metadata for the nodemap'),
2359 _(b'display the on-disk metadata for the nodemap'),
2354 ),
2360 ),
2355 ],
2361 ],
2356 )
2362 )
2357 def debugnodemap(ui, repo, **opts):
2363 def debugnodemap(ui, repo, **opts):
2358 """write and inspect on disk nodemap"""
2364 """write and inspect on disk nodemap"""
2359 if opts['dump_new']:
2365 if opts['dump_new']:
2360 unfi = repo.unfiltered()
2366 unfi = repo.unfiltered()
2361 cl = unfi.changelog
2367 cl = unfi.changelog
2362 if util.safehasattr(cl.index, "nodemap_data_all"):
2368 if util.safehasattr(cl.index, "nodemap_data_all"):
2363 data = cl.index.nodemap_data_all()
2369 data = cl.index.nodemap_data_all()
2364 else:
2370 else:
2365 data = nodemap.persistent_data(cl.index)
2371 data = nodemap.persistent_data(cl.index)
2366 ui.write(data)
2372 ui.write(data)
2367 elif opts['dump_disk']:
2373 elif opts['dump_disk']:
2368 unfi = repo.unfiltered()
2374 unfi = repo.unfiltered()
2369 cl = unfi.changelog
2375 cl = unfi.changelog
2370 nm_data = nodemap.persisted_data(cl)
2376 nm_data = nodemap.persisted_data(cl)
2371 if nm_data is not None:
2377 if nm_data is not None:
2372 docket, data = nm_data
2378 docket, data = nm_data
2373 ui.write(data[:])
2379 ui.write(data[:])
2374 elif opts['check']:
2380 elif opts['check']:
2375 unfi = repo.unfiltered()
2381 unfi = repo.unfiltered()
2376 cl = unfi.changelog
2382 cl = unfi.changelog
2377 nm_data = nodemap.persisted_data(cl)
2383 nm_data = nodemap.persisted_data(cl)
2378 if nm_data is not None:
2384 if nm_data is not None:
2379 docket, data = nm_data
2385 docket, data = nm_data
2380 return nodemap.check_data(ui, cl.index, data)
2386 return nodemap.check_data(ui, cl.index, data)
2381 elif opts['metadata']:
2387 elif opts['metadata']:
2382 unfi = repo.unfiltered()
2388 unfi = repo.unfiltered()
2383 cl = unfi.changelog
2389 cl = unfi.changelog
2384 nm_data = nodemap.persisted_data(cl)
2390 nm_data = nodemap.persisted_data(cl)
2385 if nm_data is not None:
2391 if nm_data is not None:
2386 docket, data = nm_data
2392 docket, data = nm_data
2387 ui.write((b"uid: %s\n") % docket.uid)
2393 ui.write((b"uid: %s\n") % docket.uid)
2388 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2394 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2389 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2395 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2390 ui.write((b"data-length: %d\n") % docket.data_length)
2396 ui.write((b"data-length: %d\n") % docket.data_length)
2391 ui.write((b"data-unused: %d\n") % docket.data_unused)
2397 ui.write((b"data-unused: %d\n") % docket.data_unused)
2392 unused_perc = docket.data_unused * 100.0 / docket.data_length
2398 unused_perc = docket.data_unused * 100.0 / docket.data_length
2393 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2399 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2394
2400
2395
2401
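# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the "data-unused" percentage
# printed above is simply unused bytes over the total data length.  A hedged
# sketch of the computation; unlike the code above it guards against an
# empty docket, purely so the stand-alone helper cannot divide by zero.
def _sketch_unused_percentage(data_unused, data_length):
    if not data_length:
        return 0.0
    return data_unused * 100.0 / data_length
# ---------------------------------------------------------------------------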
2396 @command(
2402 @command(
2397 b'debugobsolete',
2403 b'debugobsolete',
2398 [
2404 [
2399 (b'', b'flags', 0, _(b'markers flag')),
2405 (b'', b'flags', 0, _(b'markers flag')),
2400 (
2406 (
2401 b'',
2407 b'',
2402 b'record-parents',
2408 b'record-parents',
2403 False,
2409 False,
2404 _(b'record parent information for the precursor'),
2410 _(b'record parent information for the precursor'),
2405 ),
2411 ),
2406 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2412 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2407 (
2413 (
2408 b'',
2414 b'',
2409 b'exclusive',
2415 b'exclusive',
2410 False,
2416 False,
2411 _(b'restrict display to markers only relevant to REV'),
2417 _(b'restrict display to markers only relevant to REV'),
2412 ),
2418 ),
2413 (b'', b'index', False, _(b'display index of the marker')),
2419 (b'', b'index', False, _(b'display index of the marker')),
2414 (b'', b'delete', [], _(b'delete markers specified by indices')),
2420 (b'', b'delete', [], _(b'delete markers specified by indices')),
2415 ]
2421 ]
2416 + cmdutil.commitopts2
2422 + cmdutil.commitopts2
2417 + cmdutil.formatteropts,
2423 + cmdutil.formatteropts,
2418 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2424 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2419 )
2425 )
2420 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2426 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2421 """create arbitrary obsolete marker
2427 """create arbitrary obsolete marker
2422
2428
2423 With no arguments, displays the list of obsolescence markers."""
2429 With no arguments, displays the list of obsolescence markers."""
2424
2430
2425 opts = pycompat.byteskwargs(opts)
2431 opts = pycompat.byteskwargs(opts)
2426
2432
2427 def parsenodeid(s):
2433 def parsenodeid(s):
2428 try:
2434 try:
2429 # We do not use revsingle/revrange functions here to accept
2435 # We do not use revsingle/revrange functions here to accept
2430 # arbitrary node identifiers, possibly not present in the
2436 # arbitrary node identifiers, possibly not present in the
2431 # local repository.
2437 # local repository.
2432 n = bin(s)
2438 n = bin(s)
2433 if len(n) != repo.nodeconstants.nodelen:
2439 if len(n) != repo.nodeconstants.nodelen:
2434 raise TypeError()
2440 raise TypeError()
2435 return n
2441 return n
2436 except TypeError:
2442 except TypeError:
2437 raise error.InputError(
2443 raise error.InputError(
2438 b'changeset references must be full hexadecimal '
2444 b'changeset references must be full hexadecimal '
2439 b'node identifiers'
2445 b'node identifiers'
2440 )
2446 )
2441
2447
2442 if opts.get(b'delete'):
2448 if opts.get(b'delete'):
2443 indices = []
2449 indices = []
2444 for v in opts.get(b'delete'):
2450 for v in opts.get(b'delete'):
2445 try:
2451 try:
2446 indices.append(int(v))
2452 indices.append(int(v))
2447 except ValueError:
2453 except ValueError:
2448 raise error.InputError(
2454 raise error.InputError(
2449 _(b'invalid index value: %r') % v,
2455 _(b'invalid index value: %r') % v,
2450 hint=_(b'use integers for indices'),
2456 hint=_(b'use integers for indices'),
2451 )
2457 )
2452
2458
2453 if repo.currenttransaction():
2459 if repo.currenttransaction():
2454 raise error.Abort(
2460 raise error.Abort(
2455 _(b'cannot delete obsmarkers in the middle of a transaction.')
2461 _(b'cannot delete obsmarkers in the middle of a transaction.')
2456 )
2462 )
2457
2463
2458 with repo.lock():
2464 with repo.lock():
2459 n = repair.deleteobsmarkers(repo.obsstore, indices)
2465 n = repair.deleteobsmarkers(repo.obsstore, indices)
2460 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2466 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2461
2467
2462 return
2468 return
2463
2469
2464 if precursor is not None:
2470 if precursor is not None:
2465 if opts[b'rev']:
2471 if opts[b'rev']:
2466 raise error.InputError(
2472 raise error.InputError(
2467 b'cannot select revision when creating marker'
2473 b'cannot select revision when creating marker'
2468 )
2474 )
2469 metadata = {}
2475 metadata = {}
2470 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2476 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2471 succs = tuple(parsenodeid(succ) for succ in successors)
2477 succs = tuple(parsenodeid(succ) for succ in successors)
2472 l = repo.lock()
2478 l = repo.lock()
2473 try:
2479 try:
2474 tr = repo.transaction(b'debugobsolete')
2480 tr = repo.transaction(b'debugobsolete')
2475 try:
2481 try:
2476 date = opts.get(b'date')
2482 date = opts.get(b'date')
2477 if date:
2483 if date:
2478 date = dateutil.parsedate(date)
2484 date = dateutil.parsedate(date)
2479 else:
2485 else:
2480 date = None
2486 date = None
2481 prec = parsenodeid(precursor)
2487 prec = parsenodeid(precursor)
2482 parents = None
2488 parents = None
2483 if opts[b'record_parents']:
2489 if opts[b'record_parents']:
2484 if prec not in repo.unfiltered():
2490 if prec not in repo.unfiltered():
2485 raise error.Abort(
2491 raise error.Abort(
2486 b'cannot used --record-parents on '
2492 b'cannot used --record-parents on '
2487 b'unknown changesets'
2493 b'unknown changesets'
2488 )
2494 )
2489 parents = repo.unfiltered()[prec].parents()
2495 parents = repo.unfiltered()[prec].parents()
2490 parents = tuple(p.node() for p in parents)
2496 parents = tuple(p.node() for p in parents)
2491 repo.obsstore.create(
2497 repo.obsstore.create(
2492 tr,
2498 tr,
2493 prec,
2499 prec,
2494 succs,
2500 succs,
2495 opts[b'flags'],
2501 opts[b'flags'],
2496 parents=parents,
2502 parents=parents,
2497 date=date,
2503 date=date,
2498 metadata=metadata,
2504 metadata=metadata,
2499 ui=ui,
2505 ui=ui,
2500 )
2506 )
2501 tr.close()
2507 tr.close()
2502 except ValueError as exc:
2508 except ValueError as exc:
2503 raise error.Abort(
2509 raise error.Abort(
2504 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2510 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2505 )
2511 )
2506 finally:
2512 finally:
2507 tr.release()
2513 tr.release()
2508 finally:
2514 finally:
2509 l.release()
2515 l.release()
2510 else:
2516 else:
2511 if opts[b'rev']:
2517 if opts[b'rev']:
2512 revs = scmutil.revrange(repo, opts[b'rev'])
2518 revs = scmutil.revrange(repo, opts[b'rev'])
2513 nodes = [repo[r].node() for r in revs]
2519 nodes = [repo[r].node() for r in revs]
2514 markers = list(
2520 markers = list(
2515 obsutil.getmarkers(
2521 obsutil.getmarkers(
2516 repo, nodes=nodes, exclusive=opts[b'exclusive']
2522 repo, nodes=nodes, exclusive=opts[b'exclusive']
2517 )
2523 )
2518 )
2524 )
2519 markers.sort(key=lambda x: x._data)
2525 markers.sort(key=lambda x: x._data)
2520 else:
2526 else:
2521 markers = obsutil.getmarkers(repo)
2527 markers = obsutil.getmarkers(repo)
2522
2528
2523 markerstoiter = markers
2529 markerstoiter = markers
2524 isrelevant = lambda m: True
2530 isrelevant = lambda m: True
2525 if opts.get(b'rev') and opts.get(b'index'):
2531 if opts.get(b'rev') and opts.get(b'index'):
2526 markerstoiter = obsutil.getmarkers(repo)
2532 markerstoiter = obsutil.getmarkers(repo)
2527 markerset = set(markers)
2533 markerset = set(markers)
2528 isrelevant = lambda m: m in markerset
2534 isrelevant = lambda m: m in markerset
2529
2535
2530 fm = ui.formatter(b'debugobsolete', opts)
2536 fm = ui.formatter(b'debugobsolete', opts)
2531 for i, m in enumerate(markerstoiter):
2537 for i, m in enumerate(markerstoiter):
2532 if not isrelevant(m):
2538 if not isrelevant(m):
2533 # marker can be irrelevant when we're iterating over a set
2539 # marker can be irrelevant when we're iterating over a set
2534 # of markers (markerstoiter) which is bigger than the set
2540 # of markers (markerstoiter) which is bigger than the set
2535 # of markers we want to display (markers)
2541 # of markers we want to display (markers)
2536 # this can happen if both --index and --rev options are
2542 # this can happen if both --index and --rev options are
2537 # provided and thus we need to iterate over all of the markers
2543 # provided and thus we need to iterate over all of the markers
2538 # to get the correct indices, but only display the ones that
2544 # to get the correct indices, but only display the ones that
2539 # are relevant to --rev value
2545 # are relevant to --rev value
2540 continue
2546 continue
2541 fm.startitem()
2547 fm.startitem()
2542 ind = i if opts.get(b'index') else None
2548 ind = i if opts.get(b'index') else None
2543 cmdutil.showmarker(fm, m, index=ind)
2549 cmdutil.showmarker(fm, m, index=ind)
2544 fm.end()
2550 fm.end()
2545
2551
2546
2552
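# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: parsenodeid() above insists
# on full-length hexadecimal node identifiers.  A hedged sketch of the same
# check using only the standard library; the default of 20 bytes (40 hex
# digits) matches the traditional SHA-1 node length and is an assumption of
# this sketch, not something read from the repository.
def _sketch_parse_full_node(s, nodelen=20):
    """Return the binary node for a full-length hex string, else raise."""
    import binascii

    n = binascii.unhexlify(s)  # raises a ValueError subclass on bad hex
    if len(n) != nodelen:
        raise ValueError(
            "changeset references must be full hexadecimal node identifiers"
        )
    return n
# ---------------------------------------------------------------------------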
2547 @command(
2553 @command(
2548 b'debugp1copies',
2554 b'debugp1copies',
2549 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2555 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2550 _(b'[-r REV]'),
2556 _(b'[-r REV]'),
2551 )
2557 )
2552 def debugp1copies(ui, repo, **opts):
2558 def debugp1copies(ui, repo, **opts):
2553 """dump copy information compared to p1"""
2559 """dump copy information compared to p1"""
2554
2560
2555 opts = pycompat.byteskwargs(opts)
2561 opts = pycompat.byteskwargs(opts)
2556 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2562 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2557 for dst, src in ctx.p1copies().items():
2563 for dst, src in ctx.p1copies().items():
2558 ui.write(b'%s -> %s\n' % (src, dst))
2564 ui.write(b'%s -> %s\n' % (src, dst))
2559
2565
2560
2566
2561 @command(
2567 @command(
2562 b'debugp2copies',
2568 b'debugp2copies',
2563 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2569 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2564 _(b'[-r REV]'),
2570 _(b'[-r REV]'),
2565 )
2571 )
2566 def debugp2copies(ui, repo, **opts):
2572 def debugp2copies(ui, repo, **opts):
2567 """dump copy information compared to p2"""
2573 """dump copy information compared to p2"""
2568
2574
2569 opts = pycompat.byteskwargs(opts)
2575 opts = pycompat.byteskwargs(opts)
2570 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2576 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2571 for dst, src in ctx.p2copies().items():
2577 for dst, src in ctx.p2copies().items():
2572 ui.write(b'%s -> %s\n' % (src, dst))
2578 ui.write(b'%s -> %s\n' % (src, dst))
2573
2579
2574
2580
2575 @command(
2581 @command(
2576 b'debugpathcomplete',
2582 b'debugpathcomplete',
2577 [
2583 [
2578 (b'f', b'full', None, _(b'complete an entire path')),
2584 (b'f', b'full', None, _(b'complete an entire path')),
2579 (b'n', b'normal', None, _(b'show only normal files')),
2585 (b'n', b'normal', None, _(b'show only normal files')),
2580 (b'a', b'added', None, _(b'show only added files')),
2586 (b'a', b'added', None, _(b'show only added files')),
2581 (b'r', b'removed', None, _(b'show only removed files')),
2587 (b'r', b'removed', None, _(b'show only removed files')),
2582 ],
2588 ],
2583 _(b'FILESPEC...'),
2589 _(b'FILESPEC...'),
2584 )
2590 )
2585 def debugpathcomplete(ui, repo, *specs, **opts):
2591 def debugpathcomplete(ui, repo, *specs, **opts):
2586 """complete part or all of a tracked path
2592 """complete part or all of a tracked path
2587
2593
2588 This command supports shells that offer path name completion. It
2594 This command supports shells that offer path name completion. It
2589 currently completes only files already known to the dirstate.
2595 currently completes only files already known to the dirstate.
2590
2596
2591 Completion extends only to the next path segment unless
2597 Completion extends only to the next path segment unless
2592 --full is specified, in which case entire paths are used."""
2598 --full is specified, in which case entire paths are used."""
2593
2599
2594 def complete(path, acceptable):
2600 def complete(path, acceptable):
2595 dirstate = repo.dirstate
2601 dirstate = repo.dirstate
2596 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2602 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2597 rootdir = repo.root + pycompat.ossep
2603 rootdir = repo.root + pycompat.ossep
2598 if spec != repo.root and not spec.startswith(rootdir):
2604 if spec != repo.root and not spec.startswith(rootdir):
2599 return [], []
2605 return [], []
2600 if os.path.isdir(spec):
2606 if os.path.isdir(spec):
2601 spec += b'/'
2607 spec += b'/'
2602 spec = spec[len(rootdir) :]
2608 spec = spec[len(rootdir) :]
2603 fixpaths = pycompat.ossep != b'/'
2609 fixpaths = pycompat.ossep != b'/'
2604 if fixpaths:
2610 if fixpaths:
2605 spec = spec.replace(pycompat.ossep, b'/')
2611 spec = spec.replace(pycompat.ossep, b'/')
2606 speclen = len(spec)
2612 speclen = len(spec)
2607 fullpaths = opts['full']
2613 fullpaths = opts['full']
2608 files, dirs = set(), set()
2614 files, dirs = set(), set()
2609 adddir, addfile = dirs.add, files.add
2615 adddir, addfile = dirs.add, files.add
2610 for f, st in pycompat.iteritems(dirstate):
2616 for f, st in pycompat.iteritems(dirstate):
2611 if f.startswith(spec) and st.state in acceptable:
2617 if f.startswith(spec) and st.state in acceptable:
2612 if fixpaths:
2618 if fixpaths:
2613 f = f.replace(b'/', pycompat.ossep)
2619 f = f.replace(b'/', pycompat.ossep)
2614 if fullpaths:
2620 if fullpaths:
2615 addfile(f)
2621 addfile(f)
2616 continue
2622 continue
2617 s = f.find(pycompat.ossep, speclen)
2623 s = f.find(pycompat.ossep, speclen)
2618 if s >= 0:
2624 if s >= 0:
2619 adddir(f[:s])
2625 adddir(f[:s])
2620 else:
2626 else:
2621 addfile(f)
2627 addfile(f)
2622 return files, dirs
2628 return files, dirs
2623
2629
2624 acceptable = b''
2630 acceptable = b''
2625 if opts['normal']:
2631 if opts['normal']:
2626 acceptable += b'nm'
2632 acceptable += b'nm'
2627 if opts['added']:
2633 if opts['added']:
2628 acceptable += b'a'
2634 acceptable += b'a'
2629 if opts['removed']:
2635 if opts['removed']:
2630 acceptable += b'r'
2636 acceptable += b'r'
2631 cwd = repo.getcwd()
2637 cwd = repo.getcwd()
2632 if not specs:
2638 if not specs:
2633 specs = [b'.']
2639 specs = [b'.']
2634
2640
2635 files, dirs = set(), set()
2641 files, dirs = set(), set()
2636 for spec in specs:
2642 for spec in specs:
2637 f, d = complete(spec, acceptable or b'nmar')
2643 f, d = complete(spec, acceptable or b'nmar')
2638 files.update(f)
2644 files.update(f)
2639 dirs.update(d)
2645 dirs.update(d)
2640 files.update(dirs)
2646 files.update(dirs)
2641 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2647 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2642 ui.write(b'\n')
2648 ui.write(b'\n')
2643
2649
2644
2650
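# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: without --full, completion
# above stops at the next path separator after the typed prefix.  A hedged
# sketch of the same idea over a plain list of tracked paths; the names and
# the "/" separator are illustrative assumptions.
def _sketch_complete_segment(paths, spec, sep="/", full=False):
    files, dirs = set(), set()
    for f in paths:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find(sep, len(spec))
        if s >= 0:
            dirs.add(f[:s])  # only offer the next directory segment
        else:
            files.add(f)
    return sorted(files | dirs)
# ---------------------------------------------------------------------------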
2645 @command(
2651 @command(
2646 b'debugpathcopies',
2652 b'debugpathcopies',
2647 cmdutil.walkopts,
2653 cmdutil.walkopts,
2648 b'hg debugpathcopies REV1 REV2 [FILE]',
2654 b'hg debugpathcopies REV1 REV2 [FILE]',
2649 inferrepo=True,
2655 inferrepo=True,
2650 )
2656 )
2651 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2657 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2652 """show copies between two revisions"""
2658 """show copies between two revisions"""
2653 ctx1 = scmutil.revsingle(repo, rev1)
2659 ctx1 = scmutil.revsingle(repo, rev1)
2654 ctx2 = scmutil.revsingle(repo, rev2)
2660 ctx2 = scmutil.revsingle(repo, rev2)
2655 m = scmutil.match(ctx1, pats, opts)
2661 m = scmutil.match(ctx1, pats, opts)
2656 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2662 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2657 ui.write(b'%s -> %s\n' % (src, dst))
2663 ui.write(b'%s -> %s\n' % (src, dst))
2658
2664
2659
2665
2660 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2666 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2661 def debugpeer(ui, path):
2667 def debugpeer(ui, path):
2662 """establish a connection to a peer repository"""
2668 """establish a connection to a peer repository"""
2663 # Always enable peer request logging. Requires --debug to display
2669 # Always enable peer request logging. Requires --debug to display
2664 # though.
2670 # though.
2665 overrides = {
2671 overrides = {
2666 (b'devel', b'debug.peer-request'): True,
2672 (b'devel', b'debug.peer-request'): True,
2667 }
2673 }
2668
2674
2669 with ui.configoverride(overrides):
2675 with ui.configoverride(overrides):
2670 peer = hg.peer(ui, {}, path)
2676 peer = hg.peer(ui, {}, path)
2671
2677
2672 try:
2678 try:
2673 local = peer.local() is not None
2679 local = peer.local() is not None
2674 canpush = peer.canpush()
2680 canpush = peer.canpush()
2675
2681
2676 ui.write(_(b'url: %s\n') % peer.url())
2682 ui.write(_(b'url: %s\n') % peer.url())
2677 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2683 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2678 ui.write(
2684 ui.write(
2679 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2685 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2680 )
2686 )
2681 finally:
2687 finally:
2682 peer.close()
2688 peer.close()
2683
2689
2684
2690
2685 @command(
2691 @command(
2686 b'debugpickmergetool',
2692 b'debugpickmergetool',
2687 [
2693 [
2688 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2694 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2689 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2695 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2690 ]
2696 ]
2691 + cmdutil.walkopts
2697 + cmdutil.walkopts
2692 + cmdutil.mergetoolopts,
2698 + cmdutil.mergetoolopts,
2693 _(b'[PATTERN]...'),
2699 _(b'[PATTERN]...'),
2694 inferrepo=True,
2700 inferrepo=True,
2695 )
2701 )
2696 def debugpickmergetool(ui, repo, *pats, **opts):
2702 def debugpickmergetool(ui, repo, *pats, **opts):
2697 """examine which merge tool is chosen for the specified file
2703 """examine which merge tool is chosen for the specified file
2698
2704
2699 As described in :hg:`help merge-tools`, Mercurial examines
2705 As described in :hg:`help merge-tools`, Mercurial examines
2700 configurations below in this order to decide which merge tool is
2706 configurations below in this order to decide which merge tool is
2701 chosen for the specified file.
2707 chosen for the specified file.
2702
2708
2703 1. ``--tool`` option
2709 1. ``--tool`` option
2704 2. ``HGMERGE`` environment variable
2710 2. ``HGMERGE`` environment variable
2705 3. configurations in ``merge-patterns`` section
2711 3. configurations in ``merge-patterns`` section
2706 4. configuration of ``ui.merge``
2712 4. configuration of ``ui.merge``
2707 5. configurations in ``merge-tools`` section
2713 5. configurations in ``merge-tools`` section
2708 6. ``hgmerge`` tool (for historical reasons only)
2714 6. ``hgmerge`` tool (for historical reasons only)
2709 7. default tool for fallback (``:merge`` or ``:prompt``)
2715 7. default tool for fallback (``:merge`` or ``:prompt``)
2710
2716
2711 This command writes out examination result in the style below::
2717 This command writes out examination result in the style below::
2712
2718
2713 FILE = MERGETOOL
2719 FILE = MERGETOOL
2714
2720
2715 By default, all files known in the first parent context of the
2721 By default, all files known in the first parent context of the
2716 working directory are examined. Use file patterns and/or -I/-X
2722 working directory are examined. Use file patterns and/or -I/-X
2717 options to limit target files. -r/--rev is also useful to examine
2723 options to limit target files. -r/--rev is also useful to examine
2718 files in another context without actually updating to it.
2724 files in another context without actually updating to it.
2719
2725
2720 With --debug, this command shows warning messages while matching
2726 With --debug, this command shows warning messages while matching
2721 against ``merge-patterns`` and so on, too. It is recommended to
2727 against ``merge-patterns`` and so on, too. It is recommended to
2722 use this option with explicit file patterns and/or -I/-X options,
2728 use this option with explicit file patterns and/or -I/-X options,
2723 because this option increases the amount of output per file according
2729 because this option increases the amount of output per file according
2724 to configurations in hgrc.
2730 to configurations in hgrc.
2725
2731
2726 With -v/--verbose, this command first shows the configurations
2732 With -v/--verbose, this command first shows the configurations
2727 below (only those that are actually set).
2733 below (only those that are actually set).
2728
2734
2729 - ``--tool`` option
2735 - ``--tool`` option
2730 - ``HGMERGE`` environment variable
2736 - ``HGMERGE`` environment variable
2731 - configuration of ``ui.merge``
2737 - configuration of ``ui.merge``
2732
2738
2733 If the merge tool is chosen before matching against
2739 If the merge tool is chosen before matching against
2734 ``merge-patterns``, this command can't show any helpful
2740 ``merge-patterns``, this command can't show any helpful
2735 information, even with --debug. In such a case, the information
2741 information, even with --debug. In such a case, the information
2736 above is useful for understanding why a merge tool was chosen.
2742 above is useful for understanding why a merge tool was chosen.
2737 """
2743 """
2738 opts = pycompat.byteskwargs(opts)
2744 opts = pycompat.byteskwargs(opts)
2739 overrides = {}
2745 overrides = {}
2740 if opts[b'tool']:
2746 if opts[b'tool']:
2741 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2747 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2742 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2748 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2743
2749
2744 with ui.configoverride(overrides, b'debugmergepatterns'):
2750 with ui.configoverride(overrides, b'debugmergepatterns'):
2745 hgmerge = encoding.environ.get(b"HGMERGE")
2751 hgmerge = encoding.environ.get(b"HGMERGE")
2746 if hgmerge is not None:
2752 if hgmerge is not None:
2747 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2753 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2748 uimerge = ui.config(b"ui", b"merge")
2754 uimerge = ui.config(b"ui", b"merge")
2749 if uimerge:
2755 if uimerge:
2750 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2756 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2751
2757
2752 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2758 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2753 m = scmutil.match(ctx, pats, opts)
2759 m = scmutil.match(ctx, pats, opts)
2754 changedelete = opts[b'changedelete']
2760 changedelete = opts[b'changedelete']
2755 for path in ctx.walk(m):
2761 for path in ctx.walk(m):
2756 fctx = ctx[path]
2762 fctx = ctx[path]
2757 with ui.silent(
2763 with ui.silent(
2758 error=True
2764 error=True
2759 ) if not ui.debugflag else util.nullcontextmanager():
2765 ) if not ui.debugflag else util.nullcontextmanager():
2760 tool, toolpath = filemerge._picktool(
2766 tool, toolpath = filemerge._picktool(
2761 repo,
2767 repo,
2762 ui,
2768 ui,
2763 path,
2769 path,
2764 fctx.isbinary(),
2770 fctx.isbinary(),
2765 b'l' in fctx.flags(),
2771 b'l' in fctx.flags(),
2766 changedelete,
2772 changedelete,
2767 )
2773 )
2768 ui.write(b'%s = %s\n' % (path, tool))
2774 ui.write(b'%s = %s\n' % (path, tool))
2769
2775
2770
2776
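# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the selection order listed in
# the docstring above is a first-match-wins chain.  A hedged sketch of that
# skeleton; the candidate sources and the ":prompt" fallback below are
# illustrative stand-ins, not Mercurial's real lookup functions.
def _sketch_pick_first_configured(candidates):
    """candidates: iterable of (source_name, value_or_None); first hit wins."""
    for source, value in candidates:
        if value:
            return source, value
    return "fallback", ":prompt"

# e.g. _sketch_pick_first_configured([("--tool", None), ("HGMERGE", "vimdiff")])
#      == ("HGMERGE", "vimdiff")
# ---------------------------------------------------------------------------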
2771 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2777 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2772 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2778 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2773 """access the pushkey key/value protocol
2779 """access the pushkey key/value protocol
2774
2780
2775 With two args, list the keys in the given namespace.
2781 With two args, list the keys in the given namespace.
2776
2782
2777 With five args, set a key to new if it currently is set to old.
2783 With five args, set a key to new if it currently is set to old.
2778 Reports success or failure.
2784 Reports success or failure.
2779 """
2785 """
2780
2786
2781 target = hg.peer(ui, {}, repopath)
2787 target = hg.peer(ui, {}, repopath)
2782 try:
2788 try:
2783 if keyinfo:
2789 if keyinfo:
2784 key, old, new = keyinfo
2790 key, old, new = keyinfo
2785 with target.commandexecutor() as e:
2791 with target.commandexecutor() as e:
2786 r = e.callcommand(
2792 r = e.callcommand(
2787 b'pushkey',
2793 b'pushkey',
2788 {
2794 {
2789 b'namespace': namespace,
2795 b'namespace': namespace,
2790 b'key': key,
2796 b'key': key,
2791 b'old': old,
2797 b'old': old,
2792 b'new': new,
2798 b'new': new,
2793 },
2799 },
2794 ).result()
2800 ).result()
2795
2801
2796 ui.status(pycompat.bytestr(r) + b'\n')
2802 ui.status(pycompat.bytestr(r) + b'\n')
2797 return not r
2803 return not r
2798 else:
2804 else:
2799 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2805 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2800 ui.write(
2806 ui.write(
2801 b"%s\t%s\n"
2807 b"%s\t%s\n"
2802 % (stringutil.escapestr(k), stringutil.escapestr(v))
2808 % (stringutil.escapestr(k), stringutil.escapestr(v))
2803 )
2809 )
2804 finally:
2810 finally:
2805 target.close()
2811 target.close()
2806
2812
2807
2813
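# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the five-argument form above
# is essentially a compare-and-set: the key is updated only if it currently
# holds the old value.  A hedged sketch of those semantics over a plain dict;
# real pushkey namespaces have their own rules, so this only illustrates the
# docstring's description.
def _sketch_pushkey(store, key, old, new):
    """Update `store` and return True only when store.get(key) == old."""
    if store.get(key, b"") != old:
        return False
    store[key] = new
    return True
# ---------------------------------------------------------------------------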
2808 @command(b'debugpvec', [], _(b'A B'))
2814 @command(b'debugpvec', [], _(b'A B'))
2809 def debugpvec(ui, repo, a, b=None):
2815 def debugpvec(ui, repo, a, b=None):
2810 ca = scmutil.revsingle(repo, a)
2816 ca = scmutil.revsingle(repo, a)
2811 cb = scmutil.revsingle(repo, b)
2817 cb = scmutil.revsingle(repo, b)
2812 pa = pvec.ctxpvec(ca)
2818 pa = pvec.ctxpvec(ca)
2813 pb = pvec.ctxpvec(cb)
2819 pb = pvec.ctxpvec(cb)
2814 if pa == pb:
2820 if pa == pb:
2815 rel = b"="
2821 rel = b"="
2816 elif pa > pb:
2822 elif pa > pb:
2817 rel = b">"
2823 rel = b">"
2818 elif pa < pb:
2824 elif pa < pb:
2819 rel = b"<"
2825 rel = b"<"
2820 elif pa | pb:
2826 elif pa | pb:
2821 rel = b"|"
2827 rel = b"|"
2822 ui.write(_(b"a: %s\n") % pa)
2828 ui.write(_(b"a: %s\n") % pa)
2823 ui.write(_(b"b: %s\n") % pb)
2829 ui.write(_(b"b: %s\n") % pb)
2824 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2830 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2825 ui.write(
2831 ui.write(
2826 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2832 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2827 % (
2833 % (
2828 abs(pa._depth - pb._depth),
2834 abs(pa._depth - pb._depth),
2829 pvec._hamming(pa._vec, pb._vec),
2835 pvec._hamming(pa._vec, pb._vec),
2830 pa.distance(pb),
2836 pa.distance(pb),
2831 rel,
2837 rel,
2832 )
2838 )
2833 )
2839 )
2834
2840
2835
2841
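# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: the "hdist" figure printed
# above is a Hamming distance between two fixed-width bit vectors.  A hedged
# sketch for vectors represented as integers: it is just the popcount of the
# XOR of the two values.
def _sketch_hamming_distance(a, b):
    """Number of differing bits between two equal-width vectors (as ints)."""
    return bin(a ^ b).count("1")
# ---------------------------------------------------------------------------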
2836 @command(
2842 @command(
2837 b'debugrebuilddirstate|debugrebuildstate',
2843 b'debugrebuilddirstate|debugrebuildstate',
2838 [
2844 [
2839 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2845 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2840 (
2846 (
2841 b'',
2847 b'',
2842 b'minimal',
2848 b'minimal',
2843 None,
2849 None,
2844 _(
2850 _(
2845 b'only rebuild files that are inconsistent with '
2851 b'only rebuild files that are inconsistent with '
2846 b'the working copy parent'
2852 b'the working copy parent'
2847 ),
2853 ),
2848 ),
2854 ),
2849 ],
2855 ],
2850 _(b'[-r REV]'),
2856 _(b'[-r REV]'),
2851 )
2857 )
2852 def debugrebuilddirstate(ui, repo, rev, **opts):
2858 def debugrebuilddirstate(ui, repo, rev, **opts):
2853 """rebuild the dirstate as it would look for the given revision
2859 """rebuild the dirstate as it would look for the given revision
2854
2860
2855 If no revision is specified the first current parent will be used.
2861 If no revision is specified the first current parent will be used.
2856
2862
2857 The dirstate will be set to the files of the given revision.
2863 The dirstate will be set to the files of the given revision.
2858 The actual working directory content or existing dirstate
2864 The actual working directory content or existing dirstate
2859 information such as adds or removes is not considered.
2865 information such as adds or removes is not considered.
2860
2866
2861 ``minimal`` will only rebuild the dirstate status for files that claim to be
2867 ``minimal`` will only rebuild the dirstate status for files that claim to be
2862 tracked but are not in the parent manifest, or that exist in the parent
2868 tracked but are not in the parent manifest, or that exist in the parent
2863 manifest but are not in the dirstate. It will not change adds, removes, or
2869 manifest but are not in the dirstate. It will not change adds, removes, or
2864 modified files that are in the working copy parent.
2870 modified files that are in the working copy parent.
2865
2871
2866 One use of this command is to make the next :hg:`status` invocation
2872 One use of this command is to make the next :hg:`status` invocation
2867 check the actual file content.
2873 check the actual file content.
2868 """
2874 """
2869 ctx = scmutil.revsingle(repo, rev)
2875 ctx = scmutil.revsingle(repo, rev)
2870 with repo.wlock():
2876 with repo.wlock():
2871 dirstate = repo.dirstate
2877 dirstate = repo.dirstate
2872 changedfiles = None
2878 changedfiles = None
2873 # See command doc for what minimal does.
2879 # See command doc for what minimal does.
2874 if opts.get('minimal'):
2880 if opts.get('minimal'):
2875 manifestfiles = set(ctx.manifest().keys())
2881 manifestfiles = set(ctx.manifest().keys())
2876 dirstatefiles = set(dirstate)
2882 dirstatefiles = set(dirstate)
2877 manifestonly = manifestfiles - dirstatefiles
2883 manifestonly = manifestfiles - dirstatefiles
2878 dsonly = dirstatefiles - manifestfiles
2884 dsonly = dirstatefiles - manifestfiles
2879 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2885 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2880 changedfiles = manifestonly | dsnotadded
2886 changedfiles = manifestonly | dsnotadded
2881
2887
2882 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2888 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2883
2889
2884
2890
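# ---------------------------------------------------------------------------
# Editorial note, not part of debugcommands.py: with --minimal, the files to
# rebuild are computed with set algebra: paths present only in the manifest,
# plus dirstate-only paths that are not marked as added.  A hedged,
# repository-free sketch of the same computation; the one-letter b'a' state
# mirrors the dirstate convention used above.
def _sketch_minimal_changed_files(manifestfiles, dirstate_status):
    """dirstate_status: mapping of path -> one-letter dirstate state."""
    manifestfiles = set(manifestfiles)
    dirstatefiles = set(dirstate_status)
    manifestonly = manifestfiles - dirstatefiles
    dsonly = dirstatefiles - manifestfiles
    dsnotadded = {f for f in dsonly if dirstate_status[f] != b"a"}
    return manifestonly | dsnotadded
# ---------------------------------------------------------------------------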
2885 @command(b'debugrebuildfncache', [], b'')
2891 @command(b'debugrebuildfncache', [], b'')
2886 def debugrebuildfncache(ui, repo):
2892 def debugrebuildfncache(ui, repo):
2887 """rebuild the fncache file"""
2893 """rebuild the fncache file"""
2888 repair.rebuildfncache(ui, repo)
2894 repair.rebuildfncache(ui, repo)
2889
2895
2890
2896
2891 @command(
2897 @command(
2892 b'debugrename',
2898 b'debugrename',
2893 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2899 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2894 _(b'[-r REV] [FILE]...'),
2900 _(b'[-r REV] [FILE]...'),
2895 )
2901 )
2896 def debugrename(ui, repo, *pats, **opts):
2902 def debugrename(ui, repo, *pats, **opts):
2897 """dump rename information"""
2903 """dump rename information"""
2898
2904
2899 opts = pycompat.byteskwargs(opts)
2905 opts = pycompat.byteskwargs(opts)
2900 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2906 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2901 m = scmutil.match(ctx, pats, opts)
2907 m = scmutil.match(ctx, pats, opts)
2902 for abs in ctx.walk(m):
2908 for abs in ctx.walk(m):
2903 fctx = ctx[abs]
2909 fctx = ctx[abs]
2904 o = fctx.filelog().renamed(fctx.filenode())
2910 o = fctx.filelog().renamed(fctx.filenode())
2905 rel = repo.pathto(abs)
2911 rel = repo.pathto(abs)
2906 if o:
2912 if o:
2907 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2913 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2908 else:
2914 else:
2909 ui.write(_(b"%s not renamed\n") % rel)
2915 ui.write(_(b"%s not renamed\n") % rel)
2910
2916
2911
2917
2912 @command(b'debugrequires|debugrequirements', [], b'')
2918 @command(b'debugrequires|debugrequirements', [], b'')
2913 def debugrequirements(ui, repo):
2919 def debugrequirements(ui, repo):
2914 """print the current repo requirements"""
2920 """print the current repo requirements"""
2915 for r in sorted(repo.requirements):
2921 for r in sorted(repo.requirements):
2916 ui.write(b"%s\n" % r)
2922 ui.write(b"%s\n" % r)
2917
2923
2918
2924
2919 @command(
2925 @command(
2920 b'debugrevlog',
2926 b'debugrevlog',
2921 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2927 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2922 _(b'-c|-m|FILE'),
2928 _(b'-c|-m|FILE'),
2923 optionalrepo=True,
2929 optionalrepo=True,
2924 )
2930 )
2925 def debugrevlog(ui, repo, file_=None, **opts):
2931 def debugrevlog(ui, repo, file_=None, **opts):
2926 """show data and statistics about a revlog"""
2932 """show data and statistics about a revlog"""
2927 opts = pycompat.byteskwargs(opts)
2933 opts = pycompat.byteskwargs(opts)
2928 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2934 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2929
2935
2930 if opts.get(b"dump"):
2936 if opts.get(b"dump"):
2931 numrevs = len(r)
2937 numrevs = len(r)
2932 ui.write(
2938 ui.write(
2933 (
2939 (
2934 b"# rev p1rev p2rev start end deltastart base p1 p2"
2940 b"# rev p1rev p2rev start end deltastart base p1 p2"
2935 b" rawsize totalsize compression heads chainlen\n"
2941 b" rawsize totalsize compression heads chainlen\n"
2936 )
2942 )
2937 )
2943 )
2938 ts = 0
2944 ts = 0
2939 heads = set()
2945 heads = set()
2940
2946
2941 for rev in pycompat.xrange(numrevs):
2947 for rev in pycompat.xrange(numrevs):
2942 dbase = r.deltaparent(rev)
2948 dbase = r.deltaparent(rev)
2943 if dbase == -1:
2949 if dbase == -1:
2944 dbase = rev
2950 dbase = rev
2945 cbase = r.chainbase(rev)
2951 cbase = r.chainbase(rev)
2946 clen = r.chainlen(rev)
2952 clen = r.chainlen(rev)
2947 p1, p2 = r.parentrevs(rev)
2953 p1, p2 = r.parentrevs(rev)
2948 rs = r.rawsize(rev)
2954 rs = r.rawsize(rev)
2949 ts = ts + rs
2955 ts = ts + rs
2950 heads -= set(r.parentrevs(rev))
2956 heads -= set(r.parentrevs(rev))
2951 heads.add(rev)
2957 heads.add(rev)
2952 try:
2958 try:
2953 compression = ts / r.end(rev)
2959 compression = ts / r.end(rev)
2954 except ZeroDivisionError:
2960 except ZeroDivisionError:
2955 compression = 0
2961 compression = 0
2956 ui.write(
2962 ui.write(
2957 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2963 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2958 b"%11d %5d %8d\n"
2964 b"%11d %5d %8d\n"
2959 % (
2965 % (
2960 rev,
2966 rev,
2961 p1,
2967 p1,
2962 p2,
2968 p2,
2963 r.start(rev),
2969 r.start(rev),
2964 r.end(rev),
2970 r.end(rev),
2965 r.start(dbase),
2971 r.start(dbase),
2966 r.start(cbase),
2972 r.start(cbase),
2967 r.start(p1),
2973 r.start(p1),
2968 r.start(p2),
2974 r.start(p2),
2969 rs,
2975 rs,
2970 ts,
2976 ts,
2971 compression,
2977 compression,
2972 len(heads),
2978 len(heads),
2973 clen,
2979 clen,
2974 )
2980 )
2975 )
2981 )
2976 return 0
2982 return 0
2977
2983
2978 format = r._format_version
2984 format = r._format_version
2979 v = r._format_flags
2985 v = r._format_flags
2980 flags = []
2986 flags = []
2981 gdelta = False
2987 gdelta = False
2982 if v & revlog.FLAG_INLINE_DATA:
2988 if v & revlog.FLAG_INLINE_DATA:
2983 flags.append(b'inline')
2989 flags.append(b'inline')
2984 if v & revlog.FLAG_GENERALDELTA:
2990 if v & revlog.FLAG_GENERALDELTA:
2985 gdelta = True
2991 gdelta = True
2986 flags.append(b'generaldelta')
2992 flags.append(b'generaldelta')
2987 if not flags:
2993 if not flags:
2988 flags = [b'(none)']
2994 flags = [b'(none)']
2989
2995
2990 ### tracks merge vs single parent
2996 ### tracks merge vs single parent
2991 nummerges = 0
2997 nummerges = 0
2992
2998
2993 ### tracks the ways the deltas are built
2999 ### tracks the ways the deltas are built
2994 # nodelta
3000 # nodelta
2995 numempty = 0
3001 numempty = 0
2996 numemptytext = 0
3002 numemptytext = 0
2997 numemptydelta = 0
3003 numemptydelta = 0
2998 # full file content
3004 # full file content
2999 numfull = 0
3005 numfull = 0
3000 # intermediate snapshot against a prior snapshot
3006 # intermediate snapshot against a prior snapshot
3001 numsemi = 0
3007 numsemi = 0
3002 # snapshot count per depth
3008 # snapshot count per depth
3003 numsnapdepth = collections.defaultdict(lambda: 0)
3009 numsnapdepth = collections.defaultdict(lambda: 0)
3004 # delta against previous revision
3010 # delta against previous revision
3005 numprev = 0
3011 numprev = 0
3006 # delta against first or second parent (not prev)
3012 # delta against first or second parent (not prev)
3007 nump1 = 0
3013 nump1 = 0
3008 nump2 = 0
3014 nump2 = 0
3009 # delta against neither prev nor parents
3015 # delta against neither prev nor parents
3010 numother = 0
3016 numother = 0
3011 # delta against prev that are also first or second parent
3017 # delta against prev that are also first or second parent
3012 # (details of `numprev`)
3018 # (details of `numprev`)
3013 nump1prev = 0
3019 nump1prev = 0
3014 nump2prev = 0
3020 nump2prev = 0
3015
3021
3016 # data about the delta chain of each rev
3022 # data about the delta chain of each rev
3017 chainlengths = []
3023 chainlengths = []
3018 chainbases = []
3024 chainbases = []
3019 chainspans = []
3025 chainspans = []
3020
3026
3021 # data about each revision
3027 # data about each revision
3022 datasize = [None, 0, 0]
3028 datasize = [None, 0, 0]
3023 fullsize = [None, 0, 0]
3029 fullsize = [None, 0, 0]
3024 semisize = [None, 0, 0]
3030 semisize = [None, 0, 0]
3025 # snapshot count per depth
3031 # snapshot count per depth
3026 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3032 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3027 deltasize = [None, 0, 0]
3033 deltasize = [None, 0, 0]
3028 chunktypecounts = {}
3034 chunktypecounts = {}
3029 chunktypesizes = {}
3035 chunktypesizes = {}
3030
3036
3031 def addsize(size, l):
3037 def addsize(size, l):
3032 if l[0] is None or size < l[0]:
3038 if l[0] is None or size < l[0]:
3033 l[0] = size
3039 l[0] = size
3034 if size > l[1]:
3040 if size > l[1]:
3035 l[1] = size
3041 l[1] = size
3036 l[2] += size
3042 l[2] += size
3037
3043
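# Worked example (values are illustrative) of the min/max/total tracking done
# by addsize(): starting from [None, 0, 0], feeding sizes 40, 10 and 25
# leaves the list as [10, 40, 75] (min=10, max=40, total=75).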
3038 numrevs = len(r)
3044 numrevs = len(r)
3039 for rev in pycompat.xrange(numrevs):
3045 for rev in pycompat.xrange(numrevs):
3040 p1, p2 = r.parentrevs(rev)
3046 p1, p2 = r.parentrevs(rev)
3041 delta = r.deltaparent(rev)
3047 delta = r.deltaparent(rev)
3042 if format > 0:
3048 if format > 0:
3043 addsize(r.rawsize(rev), datasize)
3049 addsize(r.rawsize(rev), datasize)
3044 if p2 != nullrev:
3050 if p2 != nullrev:
3045 nummerges += 1
3051 nummerges += 1
3046 size = r.length(rev)
3052 size = r.length(rev)
3047 if delta == nullrev:
3053 if delta == nullrev:
3048 chainlengths.append(0)
3054 chainlengths.append(0)
3049 chainbases.append(r.start(rev))
3055 chainbases.append(r.start(rev))
3050 chainspans.append(size)
3056 chainspans.append(size)
3051 if size == 0:
3057 if size == 0:
3052 numempty += 1
3058 numempty += 1
3053 numemptytext += 1
3059 numemptytext += 1
3054 else:
3060 else:
3055 numfull += 1
3061 numfull += 1
3056 numsnapdepth[0] += 1
3062 numsnapdepth[0] += 1
3057 addsize(size, fullsize)
3063 addsize(size, fullsize)
3058 addsize(size, snapsizedepth[0])
3064 addsize(size, snapsizedepth[0])
3059 else:
3065 else:
3060 chainlengths.append(chainlengths[delta] + 1)
3066 chainlengths.append(chainlengths[delta] + 1)
3061 baseaddr = chainbases[delta]
3067 baseaddr = chainbases[delta]
3062 revaddr = r.start(rev)
3068 revaddr = r.start(rev)
3063 chainbases.append(baseaddr)
3069 chainbases.append(baseaddr)
3064 chainspans.append((revaddr - baseaddr) + size)
3070 chainspans.append((revaddr - baseaddr) + size)
3065 if size == 0:
3071 if size == 0:
3066 numempty += 1
3072 numempty += 1
3067 numemptydelta += 1
3073 numemptydelta += 1
3068 elif r.issnapshot(rev):
3074 elif r.issnapshot(rev):
3069 addsize(size, semisize)
3075 addsize(size, semisize)
3070 numsemi += 1
3076 numsemi += 1
3071 depth = r.snapshotdepth(rev)
3077 depth = r.snapshotdepth(rev)
3072 numsnapdepth[depth] += 1
3078 numsnapdepth[depth] += 1
3073 addsize(size, snapsizedepth[depth])
3079 addsize(size, snapsizedepth[depth])
3074 else:
3080 else:
3075 addsize(size, deltasize)
3081 addsize(size, deltasize)
3076 if delta == rev - 1:
3082 if delta == rev - 1:
3077 numprev += 1
3083 numprev += 1
3078 if delta == p1:
3084 if delta == p1:
3079 nump1prev += 1
3085 nump1prev += 1
3080 elif delta == p2:
3086 elif delta == p2:
3081 nump2prev += 1
3087 nump2prev += 1
3082 elif delta == p1:
3088 elif delta == p1:
3083 nump1 += 1
3089 nump1 += 1
3084 elif delta == p2:
3090 elif delta == p2:
3085 nump2 += 1
3091 nump2 += 1
3086 elif delta != nullrev:
3092 elif delta != nullrev:
3087 numother += 1
3093 numother += 1
3088
3094
3089 # Obtain data on the raw chunks in the revlog.
3095 # Obtain data on the raw chunks in the revlog.
3090 if util.safehasattr(r, b'_getsegmentforrevs'):
3096 if util.safehasattr(r, b'_getsegmentforrevs'):
3091 segment = r._getsegmentforrevs(rev, rev)[1]
3097 segment = r._getsegmentforrevs(rev, rev)[1]
3092 else:
3098 else:
3093 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3099 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3094 if segment:
3100 if segment:
3095 chunktype = bytes(segment[0:1])
3101 chunktype = bytes(segment[0:1])
3096 else:
3102 else:
3097 chunktype = b'empty'
3103 chunktype = b'empty'
3098
3104
3099 if chunktype not in chunktypecounts:
3105 if chunktype not in chunktypecounts:
3100 chunktypecounts[chunktype] = 0
3106 chunktypecounts[chunktype] = 0
3101 chunktypesizes[chunktype] = 0
3107 chunktypesizes[chunktype] = 0
3102
3108
3103 chunktypecounts[chunktype] += 1
3109 chunktypecounts[chunktype] += 1
3104 chunktypesizes[chunktype] += size
3110 chunktypesizes[chunktype] += size
3105
3111
3106 # Adjust size min value for empty cases
3112 # Adjust size min value for empty cases
3107 for size in (datasize, fullsize, semisize, deltasize):
3113 for size in (datasize, fullsize, semisize, deltasize):
3108 if size[0] is None:
3114 if size[0] is None:
3109 size[0] = 0
3115 size[0] = 0
3110
3116
3111 numdeltas = numrevs - numfull - numempty - numsemi
3117 numdeltas = numrevs - numfull - numempty - numsemi
3112 numoprev = numprev - nump1prev - nump2prev
3118 numoprev = numprev - nump1prev - nump2prev
3113 totalrawsize = datasize[2]
3119 totalrawsize = datasize[2]
3114 datasize[2] /= numrevs
3120 datasize[2] /= numrevs
3115 fulltotal = fullsize[2]
3121 fulltotal = fullsize[2]
3116 if numfull == 0:
3122 if numfull == 0:
3117 fullsize[2] = 0
3123 fullsize[2] = 0
3118 else:
3124 else:
3119 fullsize[2] /= numfull
3125 fullsize[2] /= numfull
3120 semitotal = semisize[2]
3126 semitotal = semisize[2]
3121 snaptotal = {}
3127 snaptotal = {}
3122 if numsemi > 0:
3128 if numsemi > 0:
3123 semisize[2] /= numsemi
3129 semisize[2] /= numsemi
3124 for depth in snapsizedepth:
3130 for depth in snapsizedepth:
3125 snaptotal[depth] = snapsizedepth[depth][2]
3131 snaptotal[depth] = snapsizedepth[depth][2]
3126 snapsizedepth[depth][2] /= numsnapdepth[depth]
3132 snapsizedepth[depth][2] /= numsnapdepth[depth]
3127
3133
3128 deltatotal = deltasize[2]
3134 deltatotal = deltasize[2]
3129 if numdeltas > 0:
3135 if numdeltas > 0:
3130 deltasize[2] /= numdeltas
3136 deltasize[2] /= numdeltas
3131 totalsize = fulltotal + semitotal + deltatotal
3137 totalsize = fulltotal + semitotal + deltatotal
3132 avgchainlen = sum(chainlengths) / numrevs
3138 avgchainlen = sum(chainlengths) / numrevs
3133 maxchainlen = max(chainlengths)
3139 maxchainlen = max(chainlengths)
3134 maxchainspan = max(chainspans)
3140 maxchainspan = max(chainspans)
3135 compratio = 1
3141 compratio = 1
3136 if totalsize:
3142 if totalsize:
3137 compratio = totalrawsize / totalsize
3143 compratio = totalrawsize / totalsize
3138
3144
3139 basedfmtstr = b'%%%dd\n'
3145 basedfmtstr = b'%%%dd\n'
3140 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3146 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3141
3147
3142 def dfmtstr(max):
3148 def dfmtstr(max):
3143 return basedfmtstr % len(str(max))
3149 return basedfmtstr % len(str(max))
3144
3150
3145 def pcfmtstr(max, padding=0):
3151 def pcfmtstr(max, padding=0):
3146 return basepcfmtstr % (len(str(max)), b' ' * padding)
3152 return basepcfmtstr % (len(str(max)), b' ' * padding)
3147
3153
3148 def pcfmt(value, total):
3154 def pcfmt(value, total):
3149 if total:
3155 if total:
3150 return (value, 100 * float(value) / total)
3156 return (value, 100 * float(value) / total)
3151 else:
3157 else:
3152 return value, 100.0
3158 return value, 100.0
3153
3159
3154 ui.writenoi18n(b'format : %d\n' % format)
3160 ui.writenoi18n(b'format : %d\n' % format)
3155 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3161 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3156
3162
3157 ui.write(b'\n')
3163 ui.write(b'\n')
3158 fmt = pcfmtstr(totalsize)
3164 fmt = pcfmtstr(totalsize)
3159 fmt2 = dfmtstr(totalsize)
3165 fmt2 = dfmtstr(totalsize)
3160 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3166 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3161 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3167 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3162 ui.writenoi18n(
3168 ui.writenoi18n(
3163 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3169 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3164 )
3170 )
3165 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3171 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3166 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3172 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3167 ui.writenoi18n(
3173 ui.writenoi18n(
3168 b' text : '
3174 b' text : '
3169 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3175 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3170 )
3176 )
3171 ui.writenoi18n(
3177 ui.writenoi18n(
3172 b' delta : '
3178 b' delta : '
3173 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3179 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3174 )
3180 )
3175 ui.writenoi18n(
3181 ui.writenoi18n(
3176 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3182 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3177 )
3183 )
3178 for depth in sorted(numsnapdepth):
3184 for depth in sorted(numsnapdepth):
3179 ui.write(
3185 ui.write(
3180 (b' lvl-%-3d : ' % depth)
3186 (b' lvl-%-3d : ' % depth)
3181 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3187 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3182 )
3188 )
3183 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3189 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3184 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3190 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3185 ui.writenoi18n(
3191 ui.writenoi18n(
3186 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3192 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3187 )
3193 )
3188 for depth in sorted(numsnapdepth):
3194 for depth in sorted(numsnapdepth):
3189 ui.write(
3195 ui.write(
3190 (b' lvl-%-3d : ' % depth)
3196 (b' lvl-%-3d : ' % depth)
3191 + fmt % pcfmt(snaptotal[depth], totalsize)
3197 + fmt % pcfmt(snaptotal[depth], totalsize)
3192 )
3198 )
3193 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3199 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3194
3200
3195 def fmtchunktype(chunktype):
3201 def fmtchunktype(chunktype):
3196 if chunktype == b'empty':
3202 if chunktype == b'empty':
3197 return b' %s : ' % chunktype
3203 return b' %s : ' % chunktype
3198 elif chunktype in pycompat.bytestr(string.ascii_letters):
3204 elif chunktype in pycompat.bytestr(string.ascii_letters):
3199 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3205 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3200 else:
3206 else:
3201 return b' 0x%s : ' % hex(chunktype)
3207 return b' 0x%s : ' % hex(chunktype)
3202
3208
3203 ui.write(b'\n')
3209 ui.write(b'\n')
3204 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3210 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3205 for chunktype in sorted(chunktypecounts):
3211 for chunktype in sorted(chunktypecounts):
3206 ui.write(fmtchunktype(chunktype))
3212 ui.write(fmtchunktype(chunktype))
3207 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3213 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3208 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3214 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3209 for chunktype in sorted(chunktypecounts):
3215 for chunktype in sorted(chunktypecounts):
3210 ui.write(fmtchunktype(chunktype))
3216 ui.write(fmtchunktype(chunktype))
3211 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3217 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3212
3218
3213 ui.write(b'\n')
3219 ui.write(b'\n')
3214 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3220 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3215 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3221 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3216 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3222 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3217 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3223 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3218 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3224 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3219
3225
3220 if format > 0:
3226 if format > 0:
3221 ui.write(b'\n')
3227 ui.write(b'\n')
3222 ui.writenoi18n(
3228 ui.writenoi18n(
3223 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3229 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3224 % tuple(datasize)
3230 % tuple(datasize)
3225 )
3231 )
3226 ui.writenoi18n(
3232 ui.writenoi18n(
3227 b'full revision size (min/max/avg) : %d / %d / %d\n'
3233 b'full revision size (min/max/avg) : %d / %d / %d\n'
3228 % tuple(fullsize)
3234 % tuple(fullsize)
3229 )
3235 )
3230 ui.writenoi18n(
3236 ui.writenoi18n(
3231 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3237 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3232 % tuple(semisize)
3238 % tuple(semisize)
3233 )
3239 )
3234 for depth in sorted(snapsizedepth):
3240 for depth in sorted(snapsizedepth):
3235 if depth == 0:
3241 if depth == 0:
3236 continue
3242 continue
3237 ui.writenoi18n(
3243 ui.writenoi18n(
3238 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3244 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3239 % ((depth,) + tuple(snapsizedepth[depth]))
3245 % ((depth,) + tuple(snapsizedepth[depth]))
3240 )
3246 )
3241 ui.writenoi18n(
3247 ui.writenoi18n(
3242 b'delta size (min/max/avg) : %d / %d / %d\n'
3248 b'delta size (min/max/avg) : %d / %d / %d\n'
3243 % tuple(deltasize)
3249 % tuple(deltasize)
3244 )
3250 )
3245
3251
3246 if numdeltas > 0:
3252 if numdeltas > 0:
3247 ui.write(b'\n')
3253 ui.write(b'\n')
3248 fmt = pcfmtstr(numdeltas)
3254 fmt = pcfmtstr(numdeltas)
3249 fmt2 = pcfmtstr(numdeltas, 4)
3255 fmt2 = pcfmtstr(numdeltas, 4)
3250 ui.writenoi18n(
3256 ui.writenoi18n(
3251 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3257 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3252 )
3258 )
3253 if numprev > 0:
3259 if numprev > 0:
3254 ui.writenoi18n(
3260 ui.writenoi18n(
3255 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3261 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3256 )
3262 )
3257 ui.writenoi18n(
3263 ui.writenoi18n(
3258 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3264 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3259 )
3265 )
3260 ui.writenoi18n(
3266 ui.writenoi18n(
3261 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3267 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3262 )
3268 )
3263 if gdelta:
3269 if gdelta:
3264 ui.writenoi18n(
3270 ui.writenoi18n(
3265 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3271 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3266 )
3272 )
3267 ui.writenoi18n(
3273 ui.writenoi18n(
3268 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3274 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3269 )
3275 )
3270 ui.writenoi18n(
3276 ui.writenoi18n(
3271 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3277 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3272 )
3278 )
3273
3279
3274
3280
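# Self-contained sketch of the delta-parent classification performed by
# debugrevlog above, on made-up (rev, p1, p2, deltaparent) tuples and
# ignoring the intermediate-snapshot case; not part of debugcommands.py.
import collections

NULLREV = -1
revisions = [
    (0, NULLREV, NULLREV, NULLREV),  # full snapshot (no delta parent)
    (1, 0, NULLREV, 0),              # delta against prev, which is also p1
    (2, 0, NULLREV, 0),              # delta against p1 (prev would be rev 1)
    (3, 2, 1, 2),                    # merge, delta against prev/p1
]
counts = collections.Counter()
for rev, p1, p2, delta in revisions:
    if delta == NULLREV:
        counts['full'] += 1
    elif delta == rev - 1:
        counts['prev'] += 1
        if delta == p1:
            counts['prev=p1'] += 1
        elif delta == p2:
            counts['prev=p2'] += 1
    elif delta == p1:
        counts['p1'] += 1
    elif delta == p2:
        counts['p2'] += 1
    else:
        counts['other'] += 1
# counts == {'prev': 2, 'prev=p1': 2, 'full': 1, 'p1': 1}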
3275 @command(
3281 @command(
3276 b'debugrevlogindex',
3282 b'debugrevlogindex',
3277 cmdutil.debugrevlogopts
3283 cmdutil.debugrevlogopts
3278 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3284 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3279 _(b'[-f FORMAT] -c|-m|FILE'),
3285 _(b'[-f FORMAT] -c|-m|FILE'),
3280 optionalrepo=True,
3286 optionalrepo=True,
3281 )
3287 )
3282 def debugrevlogindex(ui, repo, file_=None, **opts):
3288 def debugrevlogindex(ui, repo, file_=None, **opts):
3283 """dump the contents of a revlog index"""
3289 """dump the contents of a revlog index"""
3284 opts = pycompat.byteskwargs(opts)
3290 opts = pycompat.byteskwargs(opts)
3285 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3291 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3286 format = opts.get(b'format', 0)
3292 format = opts.get(b'format', 0)
3287 if format not in (0, 1):
3293 if format not in (0, 1):
3288 raise error.Abort(_(b"unknown format %d") % format)
3294 raise error.Abort(_(b"unknown format %d") % format)
3289
3295
3290 if ui.debugflag:
3296 if ui.debugflag:
3291 shortfn = hex
3297 shortfn = hex
3292 else:
3298 else:
3293 shortfn = short
3299 shortfn = short
3294
3300
3295 # There might not be anything in r, so have a sane default
3301 # There might not be anything in r, so have a sane default
3296 idlen = 12
3302 idlen = 12
3297 for i in r:
3303 for i in r:
3298 idlen = len(shortfn(r.node(i)))
3304 idlen = len(shortfn(r.node(i)))
3299 break
3305 break
3300
3306
3301 if format == 0:
3307 if format == 0:
3302 if ui.verbose:
3308 if ui.verbose:
3303 ui.writenoi18n(
3309 ui.writenoi18n(
3304 b" rev offset length linkrev %s %s p2\n"
3310 b" rev offset length linkrev %s %s p2\n"
3305 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3311 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3306 )
3312 )
3307 else:
3313 else:
3308 ui.writenoi18n(
3314 ui.writenoi18n(
3309 b" rev linkrev %s %s p2\n"
3315 b" rev linkrev %s %s p2\n"
3310 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3316 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3311 )
3317 )
3312 elif format == 1:
3318 elif format == 1:
3313 if ui.verbose:
3319 if ui.verbose:
3314 ui.writenoi18n(
3320 ui.writenoi18n(
3315 (
3321 (
3316 b" rev flag offset length size link p1"
3322 b" rev flag offset length size link p1"
3317 b" p2 %s\n"
3323 b" p2 %s\n"
3318 )
3324 )
3319 % b"nodeid".rjust(idlen)
3325 % b"nodeid".rjust(idlen)
3320 )
3326 )
3321 else:
3327 else:
3322 ui.writenoi18n(
3328 ui.writenoi18n(
3323 b" rev flag size link p1 p2 %s\n"
3329 b" rev flag size link p1 p2 %s\n"
3324 % b"nodeid".rjust(idlen)
3330 % b"nodeid".rjust(idlen)
3325 )
3331 )
3326
3332
3327 for i in r:
3333 for i in r:
3328 node = r.node(i)
3334 node = r.node(i)
3329 if format == 0:
3335 if format == 0:
3330 try:
3336 try:
3331 pp = r.parents(node)
3337 pp = r.parents(node)
3332 except Exception:
3338 except Exception:
3333 pp = [repo.nullid, repo.nullid]
3339 pp = [repo.nullid, repo.nullid]
3334 if ui.verbose:
3340 if ui.verbose:
3335 ui.write(
3341 ui.write(
3336 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3342 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3337 % (
3343 % (
3338 i,
3344 i,
3339 r.start(i),
3345 r.start(i),
3340 r.length(i),
3346 r.length(i),
3341 r.linkrev(i),
3347 r.linkrev(i),
3342 shortfn(node),
3348 shortfn(node),
3343 shortfn(pp[0]),
3349 shortfn(pp[0]),
3344 shortfn(pp[1]),
3350 shortfn(pp[1]),
3345 )
3351 )
3346 )
3352 )
3347 else:
3353 else:
3348 ui.write(
3354 ui.write(
3349 b"% 6d % 7d %s %s %s\n"
3355 b"% 6d % 7d %s %s %s\n"
3350 % (
3356 % (
3351 i,
3357 i,
3352 r.linkrev(i),
3358 r.linkrev(i),
3353 shortfn(node),
3359 shortfn(node),
3354 shortfn(pp[0]),
3360 shortfn(pp[0]),
3355 shortfn(pp[1]),
3361 shortfn(pp[1]),
3356 )
3362 )
3357 )
3363 )
3358 elif format == 1:
3364 elif format == 1:
3359 pr = r.parentrevs(i)
3365 pr = r.parentrevs(i)
3360 if ui.verbose:
3366 if ui.verbose:
3361 ui.write(
3367 ui.write(
3362 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3368 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3363 % (
3369 % (
3364 i,
3370 i,
3365 r.flags(i),
3371 r.flags(i),
3366 r.start(i),
3372 r.start(i),
3367 r.length(i),
3373 r.length(i),
3368 r.rawsize(i),
3374 r.rawsize(i),
3369 r.linkrev(i),
3375 r.linkrev(i),
3370 pr[0],
3376 pr[0],
3371 pr[1],
3377 pr[1],
3372 shortfn(node),
3378 shortfn(node),
3373 )
3379 )
3374 )
3380 )
3375 else:
3381 else:
3376 ui.write(
3382 ui.write(
3377 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3383 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3378 % (
3384 % (
3379 i,
3385 i,
3380 r.flags(i),
3386 r.flags(i),
3381 r.rawsize(i),
3387 r.rawsize(i),
3382 r.linkrev(i),
3388 r.linkrev(i),
3383 pr[0],
3389 pr[0],
3384 pr[1],
3390 pr[1],
3385 shortfn(node),
3391 shortfn(node),
3386 )
3392 )
3387 )
3393 )
3388
3394
3389
3395
3390 @command(
3396 @command(
3391 b'debugrevspec',
3397 b'debugrevspec',
3392 [
3398 [
3393 (
3399 (
3394 b'',
3400 b'',
3395 b'optimize',
3401 b'optimize',
3396 None,
3402 None,
3397 _(b'print parsed tree after optimizing (DEPRECATED)'),
3403 _(b'print parsed tree after optimizing (DEPRECATED)'),
3398 ),
3404 ),
3399 (
3405 (
3400 b'',
3406 b'',
3401 b'show-revs',
3407 b'show-revs',
3402 True,
3408 True,
3403 _(b'print list of result revisions (default)'),
3409 _(b'print list of result revisions (default)'),
3404 ),
3410 ),
3405 (
3411 (
3406 b's',
3412 b's',
3407 b'show-set',
3413 b'show-set',
3408 None,
3414 None,
3409 _(b'print internal representation of result set'),
3415 _(b'print internal representation of result set'),
3410 ),
3416 ),
3411 (
3417 (
3412 b'p',
3418 b'p',
3413 b'show-stage',
3419 b'show-stage',
3414 [],
3420 [],
3415 _(b'print parsed tree at the given stage'),
3421 _(b'print parsed tree at the given stage'),
3416 _(b'NAME'),
3422 _(b'NAME'),
3417 ),
3423 ),
3418 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3424 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3419 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3425 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3420 ],
3426 ],
3421 b'REVSPEC',
3427 b'REVSPEC',
3422 )
3428 )
3423 def debugrevspec(ui, repo, expr, **opts):
3429 def debugrevspec(ui, repo, expr, **opts):
3424 """parse and apply a revision specification
3430 """parse and apply a revision specification
3425
3431
3426 Use -p/--show-stage option to print the parsed tree at the given stages.
3432 Use -p/--show-stage option to print the parsed tree at the given stages.
3427 Use -p all to print tree at every stage.
3433 Use -p all to print tree at every stage.
3428
3434
3429 Use --no-show-revs option with -s or -p to print only the set
3435 Use --no-show-revs option with -s or -p to print only the set
3430 representation or the parsed tree respectively.
3436 representation or the parsed tree respectively.
3431
3437
3432 Use --verify-optimized to compare the optimized result with the unoptimized
3438 Use --verify-optimized to compare the optimized result with the unoptimized
3433 one. Returns 1 if the optimized result differs.
3439 one. Returns 1 if the optimized result differs.
3434 """
3440 """
3435 opts = pycompat.byteskwargs(opts)
3441 opts = pycompat.byteskwargs(opts)
3436 aliases = ui.configitems(b'revsetalias')
3442 aliases = ui.configitems(b'revsetalias')
3437 stages = [
3443 stages = [
3438 (b'parsed', lambda tree: tree),
3444 (b'parsed', lambda tree: tree),
3439 (
3445 (
3440 b'expanded',
3446 b'expanded',
3441 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3447 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3442 ),
3448 ),
3443 (b'concatenated', revsetlang.foldconcat),
3449 (b'concatenated', revsetlang.foldconcat),
3444 (b'analyzed', revsetlang.analyze),
3450 (b'analyzed', revsetlang.analyze),
3445 (b'optimized', revsetlang.optimize),
3451 (b'optimized', revsetlang.optimize),
3446 ]
3452 ]
3447 if opts[b'no_optimized']:
3453 if opts[b'no_optimized']:
3448 stages = stages[:-1]
3454 stages = stages[:-1]
3449 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3455 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3450 raise error.Abort(
3456 raise error.Abort(
3451 _(b'cannot use --verify-optimized with --no-optimized')
3457 _(b'cannot use --verify-optimized with --no-optimized')
3452 )
3458 )
3453 stagenames = {n for n, f in stages}
3459 stagenames = {n for n, f in stages}
3454
3460
3455 showalways = set()
3461 showalways = set()
3456 showchanged = set()
3462 showchanged = set()
3457 if ui.verbose and not opts[b'show_stage']:
3463 if ui.verbose and not opts[b'show_stage']:
3458 # show parsed tree by --verbose (deprecated)
3464 # show parsed tree by --verbose (deprecated)
3459 showalways.add(b'parsed')
3465 showalways.add(b'parsed')
3460 showchanged.update([b'expanded', b'concatenated'])
3466 showchanged.update([b'expanded', b'concatenated'])
3461 if opts[b'optimize']:
3467 if opts[b'optimize']:
3462 showalways.add(b'optimized')
3468 showalways.add(b'optimized')
3463 if opts[b'show_stage'] and opts[b'optimize']:
3469 if opts[b'show_stage'] and opts[b'optimize']:
3464 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3470 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3465 if opts[b'show_stage'] == [b'all']:
3471 if opts[b'show_stage'] == [b'all']:
3466 showalways.update(stagenames)
3472 showalways.update(stagenames)
3467 else:
3473 else:
3468 for n in opts[b'show_stage']:
3474 for n in opts[b'show_stage']:
3469 if n not in stagenames:
3475 if n not in stagenames:
3470 raise error.Abort(_(b'invalid stage name: %s') % n)
3476 raise error.Abort(_(b'invalid stage name: %s') % n)
3471 showalways.update(opts[b'show_stage'])
3477 showalways.update(opts[b'show_stage'])
3472
3478
3473 treebystage = {}
3479 treebystage = {}
3474 printedtree = None
3480 printedtree = None
3475 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3481 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3476 for n, f in stages:
3482 for n, f in stages:
3477 treebystage[n] = tree = f(tree)
3483 treebystage[n] = tree = f(tree)
3478 if n in showalways or (n in showchanged and tree != printedtree):
3484 if n in showalways or (n in showchanged and tree != printedtree):
3479 if opts[b'show_stage'] or n != b'parsed':
3485 if opts[b'show_stage'] or n != b'parsed':
3480 ui.write(b"* %s:\n" % n)
3486 ui.write(b"* %s:\n" % n)
3481 ui.write(revsetlang.prettyformat(tree), b"\n")
3487 ui.write(revsetlang.prettyformat(tree), b"\n")
3482 printedtree = tree
3488 printedtree = tree
3483
3489
3484 if opts[b'verify_optimized']:
3490 if opts[b'verify_optimized']:
3485 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3491 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3486 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3492 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3487 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3493 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3488 ui.writenoi18n(
3494 ui.writenoi18n(
3489 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3495 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3490 )
3496 )
3491 ui.writenoi18n(
3497 ui.writenoi18n(
3492 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3498 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3493 )
3499 )
3494 arevs = list(arevs)
3500 arevs = list(arevs)
3495 brevs = list(brevs)
3501 brevs = list(brevs)
3496 if arevs == brevs:
3502 if arevs == brevs:
3497 return 0
3503 return 0
3498 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3504 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3499 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3505 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3500 sm = difflib.SequenceMatcher(None, arevs, brevs)
3506 sm = difflib.SequenceMatcher(None, arevs, brevs)
3501 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3507 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3502 if tag in ('delete', 'replace'):
3508 if tag in ('delete', 'replace'):
3503 for c in arevs[alo:ahi]:
3509 for c in arevs[alo:ahi]:
3504 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3510 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3505 if tag in ('insert', 'replace'):
3511 if tag in ('insert', 'replace'):
3506 for c in brevs[blo:bhi]:
3512 for c in brevs[blo:bhi]:
3507 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3513 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3508 if tag == 'equal':
3514 if tag == 'equal':
3509 for c in arevs[alo:ahi]:
3515 for c in arevs[alo:ahi]:
3510 ui.write(b' %d\n' % c)
3516 ui.write(b' %d\n' % c)
3511 return 1
3517 return 1
3512
3518
3513 func = revset.makematcher(tree)
3519 func = revset.makematcher(tree)
3514 revs = func(repo)
3520 revs = func(repo)
3515 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3521 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3516 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3522 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3517 if not opts[b'show_revs']:
3523 if not opts[b'show_revs']:
3518 return
3524 return
3519 for c in revs:
3525 for c in revs:
3520 ui.write(b"%d\n" % c)
3526 ui.write(b"%d\n" % c)
3521
3527
3522
3528
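# Schematic sketch of the --show-stage pipeline above: the parsed tree is
# threaded through each stage in order and printed for the selected stages.
# The stage functions here are toy stand-ins, not the real revsetlang ones.
toy_stages = [
    ('parsed', lambda t: t),
    ('expanded', lambda t: t.replace('myalias', 'heads(default)')),
    ('optimized', lambda t: t.strip()),
]
selected = {'parsed', 'optimized'}

tree = '  myalias and not obsolete()  '  # stand-in for the parsed revset tree
for name, transform in toy_stages:
    tree = transform(tree)
    if name in selected:
        print('* %s:' % name)
        print(tree)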
3523 @command(
3529 @command(
3524 b'debugserve',
3530 b'debugserve',
3525 [
3531 [
3526 (
3532 (
3527 b'',
3533 b'',
3528 b'sshstdio',
3534 b'sshstdio',
3529 False,
3535 False,
3530 _(b'run an SSH server bound to process handles'),
3536 _(b'run an SSH server bound to process handles'),
3531 ),
3537 ),
3532 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3538 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3533 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3539 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3534 ],
3540 ],
3535 b'',
3541 b'',
3536 )
3542 )
3537 def debugserve(ui, repo, **opts):
3543 def debugserve(ui, repo, **opts):
3538 """run a server with advanced settings
3544 """run a server with advanced settings
3539
3545
3540 This command is similar to :hg:`serve`. It exists partially as a
3546 This command is similar to :hg:`serve`. It exists partially as a
3541 workaround to the fact that ``hg serve --stdio`` must have specific
3547 workaround to the fact that ``hg serve --stdio`` must have specific
3542 arguments for security reasons.
3548 arguments for security reasons.
3543 """
3549 """
3544 opts = pycompat.byteskwargs(opts)
3550 opts = pycompat.byteskwargs(opts)
3545
3551
3546 if not opts[b'sshstdio']:
3552 if not opts[b'sshstdio']:
3547 raise error.Abort(_(b'only --sshstdio is currently supported'))
3553 raise error.Abort(_(b'only --sshstdio is currently supported'))
3548
3554
3549 logfh = None
3555 logfh = None
3550
3556
3551 if opts[b'logiofd'] and opts[b'logiofile']:
3557 if opts[b'logiofd'] and opts[b'logiofile']:
3552 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3558 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3553
3559
3554 if opts[b'logiofd']:
3560 if opts[b'logiofd']:
3555 # Ideally we would be line buffered. But line buffering in binary
3561 # Ideally we would be line buffered. But line buffering in binary
3556 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3562 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3557 # buffering could have performance impacts. But since this isn't
3563 # buffering could have performance impacts. But since this isn't
3558 # performance critical code, it should be fine.
3564 # performance critical code, it should be fine.
3559 try:
3565 try:
3560 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3566 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3561 except OSError as e:
3567 except OSError as e:
3562 if e.errno != errno.ESPIPE:
3568 if e.errno != errno.ESPIPE:
3563 raise
3569 raise
3564 # can't seek a pipe, so `ab` mode fails on py3
3570 # can't seek a pipe, so `ab` mode fails on py3
3565 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3571 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3566 elif opts[b'logiofile']:
3572 elif opts[b'logiofile']:
3567 logfh = open(opts[b'logiofile'], b'ab', 0)
3573 logfh = open(opts[b'logiofile'], b'ab', 0)
3568
3574
3569 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3575 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3570 s.serve_forever()
3576 s.serve_forever()
3571
3577
3572
3578
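# Standalone sketch of the log-file-descriptor fallback used by debugserve
# above: append-binary ('ab') requires a seekable file, so for pipes (which
# raise ESPIPE) fall back to plain unbuffered write-binary ('wb').
# The fd value passed to open_log_fd is hypothetical.
import errno
import os

def open_log_fd(fd):
    try:
        return os.fdopen(fd, 'ab', 0)
    except OSError as e:
        if e.errno != errno.ESPIPE:
            raise
        return os.fdopen(fd, 'wb', 0)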
3573 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3579 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3574 def debugsetparents(ui, repo, rev1, rev2=None):
3580 def debugsetparents(ui, repo, rev1, rev2=None):
3575 """manually set the parents of the current working directory (DANGEROUS)
3581 """manually set the parents of the current working directory (DANGEROUS)
3576
3582
3577 This command is not what you are looking for and should not be used. Using
3583 This command is not what you are looking for and should not be used. Using
3578 this command will most certainly result in slight corruption of the file
3584 this command will most certainly result in slight corruption of the file
3579 level histories within your repository. DO NOT USE THIS COMMAND.
3585 level histories within your repository. DO NOT USE THIS COMMAND.
3580
3586
3581 The command updates the p1 and p2 fields in the dirstate, touching nothing
3587 The command updates the p1 and p2 fields in the dirstate, touching nothing
3582 else. This is useful for writing repository conversion tools, but it
3588 else. This is useful for writing repository conversion tools, but it
3583 should be used with extreme care. For example, neither the working
3589 should be used with extreme care. For example, neither the working
3584 directory nor the dirstate is updated, so file status may be incorrect
3590 directory nor the dirstate is updated, so file status may be incorrect
3585 after running this command. Only use it if you are one of the few people who
3591 after running this command. Only use it if you are one of the few people who
3586 deeply understand both conversion tools and file level histories. If you are
3592 deeply understand both conversion tools and file level histories. If you are
3587 reading this help, you are not one of those people (most of them sailed west
3593 reading this help, you are not one of those people (most of them sailed west
3588 from Mithlond anyway).
3594 from Mithlond anyway).
3589
3595
3590 So one last time DO NOT USE THIS COMMAND.
3596 So one last time DO NOT USE THIS COMMAND.
3591
3597
3592 Returns 0 on success.
3598 Returns 0 on success.
3593 """
3599 """
3594
3600
3595 node1 = scmutil.revsingle(repo, rev1).node()
3601 node1 = scmutil.revsingle(repo, rev1).node()
3596 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3602 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3597
3603
3598 with repo.wlock():
3604 with repo.wlock():
3599 repo.setparents(node1, node2)
3605 repo.setparents(node1, node2)
3600
3606
3601
3607
3602 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3608 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3603 def debugsidedata(ui, repo, file_, rev=None, **opts):
3609 def debugsidedata(ui, repo, file_, rev=None, **opts):
3604 """dump the side data for a cl/manifest/file revision
3610 """dump the side data for a cl/manifest/file revision
3605
3611
3606 Use --verbose to dump the sidedata content."""
3612 Use --verbose to dump the sidedata content."""
3607 opts = pycompat.byteskwargs(opts)
3613 opts = pycompat.byteskwargs(opts)
3608 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3614 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3609 if rev is not None:
3615 if rev is not None:
3610 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3616 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3611 file_, rev = None, file_
3617 file_, rev = None, file_
3612 elif rev is None:
3618 elif rev is None:
3613 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3619 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3614 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3620 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3615 r = getattr(r, '_revlog', r)
3621 r = getattr(r, '_revlog', r)
3616 try:
3622 try:
3617 sidedata = r.sidedata(r.lookup(rev))
3623 sidedata = r.sidedata(r.lookup(rev))
3618 except KeyError:
3624 except KeyError:
3619 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3625 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3620 if sidedata:
3626 if sidedata:
3621 sidedata = list(sidedata.items())
3627 sidedata = list(sidedata.items())
3622 sidedata.sort()
3628 sidedata.sort()
3623 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3629 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3624 for key, value in sidedata:
3630 for key, value in sidedata:
3625 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3631 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3626 if ui.verbose:
3632 if ui.verbose:
3627 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3633 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3628
3634
3629
3635
3630 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3636 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3631 def debugssl(ui, repo, source=None, **opts):
3637 def debugssl(ui, repo, source=None, **opts):
3632 """test a secure connection to a server
3638 """test a secure connection to a server
3633
3639
3634 This builds the certificate chain for the server on Windows, installing the
3640 This builds the certificate chain for the server on Windows, installing the
3635 missing intermediates and trusted root via Windows Update if necessary. It
3641 missing intermediates and trusted root via Windows Update if necessary. It
3636 does nothing on other platforms.
3642 does nothing on other platforms.
3637
3643
3638 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3644 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3639 that server is used. See :hg:`help urls` for more information.
3645 that server is used. See :hg:`help urls` for more information.
3640
3646
3641 If the update succeeds, retry the original operation. Otherwise, the cause
3647 If the update succeeds, retry the original operation. Otherwise, the cause
3642 of the SSL error is likely another issue.
3648 of the SSL error is likely another issue.
3643 """
3649 """
3644 if not pycompat.iswindows:
3650 if not pycompat.iswindows:
3645 raise error.Abort(
3651 raise error.Abort(
3646 _(b'certificate chain building is only possible on Windows')
3652 _(b'certificate chain building is only possible on Windows')
3647 )
3653 )
3648
3654
3649 if not source:
3655 if not source:
3650 if not repo:
3656 if not repo:
3651 raise error.Abort(
3657 raise error.Abort(
3652 _(
3658 _(
3653 b"there is no Mercurial repository here, and no "
3659 b"there is no Mercurial repository here, and no "
3654 b"server specified"
3660 b"server specified"
3655 )
3661 )
3656 )
3662 )
3657 source = b"default"
3663 source = b"default"
3658
3664
3659 source, branches = urlutil.get_unique_pull_path(
3665 source, branches = urlutil.get_unique_pull_path(
3660 b'debugssl', repo, ui, source
3666 b'debugssl', repo, ui, source
3661 )
3667 )
3662 url = urlutil.url(source)
3668 url = urlutil.url(source)
3663
3669
3664 defaultport = {b'https': 443, b'ssh': 22}
3670 defaultport = {b'https': 443, b'ssh': 22}
3665 if url.scheme in defaultport:
3671 if url.scheme in defaultport:
3666 try:
3672 try:
3667 addr = (url.host, int(url.port or defaultport[url.scheme]))
3673 addr = (url.host, int(url.port or defaultport[url.scheme]))
3668 except ValueError:
3674 except ValueError:
3669 raise error.Abort(_(b"malformed port number in URL"))
3675 raise error.Abort(_(b"malformed port number in URL"))
3670 else:
3676 else:
3671 raise error.Abort(_(b"only https and ssh connections are supported"))
3677 raise error.Abort(_(b"only https and ssh connections are supported"))
3672
3678
3673 from . import win32
3679 from . import win32
3674
3680
3675 s = ssl.wrap_socket(
3681 s = ssl.wrap_socket(
3676 socket.socket(),
3682 socket.socket(),
3677 ssl_version=ssl.PROTOCOL_TLS,
3683 ssl_version=ssl.PROTOCOL_TLS,
3678 cert_reqs=ssl.CERT_NONE,
3684 cert_reqs=ssl.CERT_NONE,
3679 ca_certs=None,
3685 ca_certs=None,
3680 )
3686 )
3681
3687
3682 try:
3688 try:
3683 s.connect(addr)
3689 s.connect(addr)
3684 cert = s.getpeercert(True)
3690 cert = s.getpeercert(True)
3685
3691
3686 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3692 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3687
3693
3688 complete = win32.checkcertificatechain(cert, build=False)
3694 complete = win32.checkcertificatechain(cert, build=False)
3689
3695
3690 if not complete:
3696 if not complete:
3691 ui.status(_(b'certificate chain is incomplete, updating... '))
3697 ui.status(_(b'certificate chain is incomplete, updating... '))
3692
3698
3693 if not win32.checkcertificatechain(cert):
3699 if not win32.checkcertificatechain(cert):
3694 ui.status(_(b'failed.\n'))
3700 ui.status(_(b'failed.\n'))
3695 else:
3701 else:
3696 ui.status(_(b'done.\n'))
3702 ui.status(_(b'done.\n'))
3697 else:
3703 else:
3698 ui.status(_(b'full certificate chain is available\n'))
3704 ui.status(_(b'full certificate chain is available\n'))
3699 finally:
3705 finally:
3700 s.close()
3706 s.close()
3701
3707
3702
3708
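# Small sketch of the host/port resolution above: an explicit port in the
# URL wins, otherwise the scheme's default (443 for https, 22 for ssh).
defaultport = {'https': 443, 'ssh': 22}

def resolve_addr(host, scheme, port=None):
    if scheme not in defaultport:
        raise ValueError('only https and ssh connections are supported')
    return (host, int(port or defaultport[scheme]))

# resolve_addr('example.com', 'https') == ('example.com', 443)
# resolve_addr('example.com', 'ssh', '2222') == ('example.com', 2222)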
3703 @command(
3709 @command(
3704 b"debugbackupbundle",
3710 b"debugbackupbundle",
3705 [
3711 [
3706 (
3712 (
3707 b"",
3713 b"",
3708 b"recover",
3714 b"recover",
3709 b"",
3715 b"",
3710 b"brings the specified changeset back into the repository",
3716 b"brings the specified changeset back into the repository",
3711 )
3717 )
3712 ]
3718 ]
3713 + cmdutil.logopts,
3719 + cmdutil.logopts,
3714 _(b"hg debugbackupbundle [--recover HASH]"),
3720 _(b"hg debugbackupbundle [--recover HASH]"),
3715 )
3721 )
3716 def debugbackupbundle(ui, repo, *pats, **opts):
3722 def debugbackupbundle(ui, repo, *pats, **opts):
3717 """lists the changesets available in backup bundles
3723 """lists the changesets available in backup bundles
3718
3724
3719 Without any arguments, this command prints a list of the changesets in each
3725 Without any arguments, this command prints a list of the changesets in each
3720 backup bundle.
3726 backup bundle.
3721
3727
3722 --recover takes a changeset hash and unbundles the first bundle that
3728 --recover takes a changeset hash and unbundles the first bundle that
3723 contains that hash, which puts that changeset back in your repository.
3729 contains that hash, which puts that changeset back in your repository.
3724
3730
3725 --verbose will print the entire commit message and the bundle path for that
3731 --verbose will print the entire commit message and the bundle path for that
3726 backup.
3732 backup.
3727 """
3733 """
3728 backups = list(
3734 backups = list(
3729 filter(
3735 filter(
3730 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3736 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3731 )
3737 )
3732 )
3738 )
3733 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3739 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3734
3740
3735 opts = pycompat.byteskwargs(opts)
3741 opts = pycompat.byteskwargs(opts)
3736 opts[b"bundle"] = b""
3742 opts[b"bundle"] = b""
3737 opts[b"force"] = None
3743 opts[b"force"] = None
3738 limit = logcmdutil.getlimit(opts)
3744 limit = logcmdutil.getlimit(opts)
3739
3745
3740 def display(other, chlist, displayer):
3746 def display(other, chlist, displayer):
3741 if opts.get(b"newest_first"):
3747 if opts.get(b"newest_first"):
3742 chlist.reverse()
3748 chlist.reverse()
3743 count = 0
3749 count = 0
3744 for n in chlist:
3750 for n in chlist:
3745 if limit is not None and count >= limit:
3751 if limit is not None and count >= limit:
3746 break
3752 break
3747 parents = [
3753 parents = [
3748 True for p in other.changelog.parents(n) if p != repo.nullid
3754 True for p in other.changelog.parents(n) if p != repo.nullid
3749 ]
3755 ]
3750 if opts.get(b"no_merges") and len(parents) == 2:
3756 if opts.get(b"no_merges") and len(parents) == 2:
3751 continue
3757 continue
3752 count += 1
3758 count += 1
3753 displayer.show(other[n])
3759 displayer.show(other[n])
3754
3760
3755 recovernode = opts.get(b"recover")
3761 recovernode = opts.get(b"recover")
3756 if recovernode:
3762 if recovernode:
3757 if scmutil.isrevsymbol(repo, recovernode):
3763 if scmutil.isrevsymbol(repo, recovernode):
3758 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3764 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3759 return
3765 return
3760 elif backups:
3766 elif backups:
3761 msg = _(
3767 msg = _(
3762 b"Recover changesets using: hg debugbackupbundle --recover "
3768 b"Recover changesets using: hg debugbackupbundle --recover "
3763 b"<changeset hash>\n\nAvailable backup changesets:"
3769 b"<changeset hash>\n\nAvailable backup changesets:"
3764 )
3770 )
3765 ui.status(msg, label=b"status.removed")
3771 ui.status(msg, label=b"status.removed")
3766 else:
3772 else:
3767 ui.status(_(b"no backup changesets found\n"))
3773 ui.status(_(b"no backup changesets found\n"))
3768 return
3774 return
3769
3775
3770 for backup in backups:
3776 for backup in backups:
3771 # Much of this is copied from the hg incoming logic
3777 # Much of this is copied from the hg incoming logic
3772 source = os.path.relpath(backup, encoding.getcwd())
3778 source = os.path.relpath(backup, encoding.getcwd())
3773 source, branches = urlutil.get_unique_pull_path(
3779 source, branches = urlutil.get_unique_pull_path(
3774 b'debugbackupbundle',
3780 b'debugbackupbundle',
3775 repo,
3781 repo,
3776 ui,
3782 ui,
3777 source,
3783 source,
3778 default_branches=opts.get(b'branch'),
3784 default_branches=opts.get(b'branch'),
3779 )
3785 )
3780 try:
3786 try:
3781 other = hg.peer(repo, opts, source)
3787 other = hg.peer(repo, opts, source)
3782 except error.LookupError as ex:
3788 except error.LookupError as ex:
3783 msg = _(b"\nwarning: unable to open bundle %s") % source
3789 msg = _(b"\nwarning: unable to open bundle %s") % source
3784 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3790 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3785 ui.warn(msg, hint=hint)
3791 ui.warn(msg, hint=hint)
3786 continue
3792 continue
3787 revs, checkout = hg.addbranchrevs(
3793 revs, checkout = hg.addbranchrevs(
3788 repo, other, branches, opts.get(b"rev")
3794 repo, other, branches, opts.get(b"rev")
3789 )
3795 )
3790
3796
3791 if revs:
3797 if revs:
3792 revs = [other.lookup(rev) for rev in revs]
3798 revs = [other.lookup(rev) for rev in revs]
3793
3799
3794 with ui.silent():
3800 with ui.silent():
3795 try:
3801 try:
3796 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3802 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3797 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3803 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3798 )
3804 )
3799 except error.LookupError:
3805 except error.LookupError:
3800 continue
3806 continue
3801
3807
3802 try:
3808 try:
3803 if not chlist:
3809 if not chlist:
3804 continue
3810 continue
3805 if recovernode:
3811 if recovernode:
3806 with repo.lock(), repo.transaction(b"unbundle") as tr:
3812 with repo.lock(), repo.transaction(b"unbundle") as tr:
3807 if scmutil.isrevsymbol(other, recovernode):
3813 if scmutil.isrevsymbol(other, recovernode):
3808 ui.status(_(b"Unbundling %s\n") % (recovernode))
3814 ui.status(_(b"Unbundling %s\n") % (recovernode))
3809 f = hg.openpath(ui, source)
3815 f = hg.openpath(ui, source)
3810 gen = exchange.readbundle(ui, f, source)
3816 gen = exchange.readbundle(ui, f, source)
3811 if isinstance(gen, bundle2.unbundle20):
3817 if isinstance(gen, bundle2.unbundle20):
3812 bundle2.applybundle(
3818 bundle2.applybundle(
3813 repo,
3819 repo,
3814 gen,
3820 gen,
3815 tr,
3821 tr,
3816 source=b"unbundle",
3822 source=b"unbundle",
3817 url=b"bundle:" + source,
3823 url=b"bundle:" + source,
3818 )
3824 )
3819 else:
3825 else:
3820 gen.apply(repo, b"unbundle", b"bundle:" + source)
3826 gen.apply(repo, b"unbundle", b"bundle:" + source)
3821 break
3827 break
3822 else:
3828 else:
3823 backupdate = encoding.strtolocal(
3829 backupdate = encoding.strtolocal(
3824 time.strftime(
3830 time.strftime(
3825 "%a %H:%M, %Y-%m-%d",
3831 "%a %H:%M, %Y-%m-%d",
3826 time.localtime(os.path.getmtime(source)),
3832 time.localtime(os.path.getmtime(source)),
3827 )
3833 )
3828 )
3834 )
3829 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3835 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3830 if ui.verbose:
3836 if ui.verbose:
3831 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3837 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3832 else:
3838 else:
3833 opts[
3839 opts[
3834 b"template"
3840 b"template"
3835 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3841 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3836 displayer = logcmdutil.changesetdisplayer(
3842 displayer = logcmdutil.changesetdisplayer(
3837 ui, other, opts, False
3843 ui, other, opts, False
3838 )
3844 )
3839 display(other, chlist, displayer)
3845 display(other, chlist, displayer)
3840 displayer.close()
3846 displayer.close()
3841 finally:
3847 finally:
3842 cleanupfn()
3848 cleanupfn()
3843
3849
3844
3850
3845 @command(
3851 @command(
3846 b'debugsub',
3852 b'debugsub',
3847 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3853 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3848 _(b'[-r REV] [REV]'),
3854 _(b'[-r REV] [REV]'),
3849 )
3855 )
3850 def debugsub(ui, repo, rev=None):
3856 def debugsub(ui, repo, rev=None):
3851 ctx = scmutil.revsingle(repo, rev, None)
3857 ctx = scmutil.revsingle(repo, rev, None)
3852 for k, v in sorted(ctx.substate.items()):
3858 for k, v in sorted(ctx.substate.items()):
3853 ui.writenoi18n(b'path %s\n' % k)
3859 ui.writenoi18n(b'path %s\n' % k)
3854 ui.writenoi18n(b' source %s\n' % v[0])
3860 ui.writenoi18n(b' source %s\n' % v[0])
3855 ui.writenoi18n(b' revision %s\n' % v[1])
3861 ui.writenoi18n(b' revision %s\n' % v[1])
3856
3862
3857
3863
3858 @command(b'debugshell', optionalrepo=True)
3864 @command(b'debugshell', optionalrepo=True)
3859 def debugshell(ui, repo):
3865 def debugshell(ui, repo):
3860 """run an interactive Python interpreter
3866 """run an interactive Python interpreter
3861
3867
3862 The local namespace is provided with a reference to the ui and
3868 The local namespace is provided with a reference to the ui and
3863 the repo instance (if available).
3869 the repo instance (if available).
3864 """
3870 """
3865 import code
3871 import code
3866
3872
3867 imported_objects = {
3873 imported_objects = {
3868 'ui': ui,
3874 'ui': ui,
3869 'repo': repo,
3875 'repo': repo,
3870 }
3876 }
3871
3877
3872 code.interact(local=imported_objects)
3878 code.interact(local=imported_objects)
3873
3879
3874
3880
3875 @command(
3881 @command(
3876 b'debugsuccessorssets',
3882 b'debugsuccessorssets',
3877 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3883 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3878 _(b'[REV]'),
3884 _(b'[REV]'),
3879 )
3885 )
3880 def debugsuccessorssets(ui, repo, *revs, **opts):
3886 def debugsuccessorssets(ui, repo, *revs, **opts):
3881 """show set of successors for revision
3887 """show set of successors for revision
3882
3888
3883 A successors set of changeset A is a consistent group of revisions that
3889 A successors set of changeset A is a consistent group of revisions that
3884 succeed A. It contains non-obsolete changesets only, unless the
3890 succeed A. It contains non-obsolete changesets only, unless the
3885 --closest option is set.
3891 --closest option is set.
3886
3892
3887 In most cases a changeset A has a single successors set containing a single
3893 In most cases a changeset A has a single successors set containing a single
3888 successor (changeset A replaced by A').
3894 successor (changeset A replaced by A').
3889
3895
3890 A changeset that is made obsolete with no successors is called "pruned".
3896 A changeset that is made obsolete with no successors is called "pruned".
3891 Such changesets have no successors sets at all.
3897 Such changesets have no successors sets at all.
3892
3898
3893 A changeset that has been "split" will have a successors set containing
3899 A changeset that has been "split" will have a successors set containing
3894 more than one successor.
3900 more than one successor.
3895
3901
3896 A changeset that has been rewritten in multiple different ways is called
3902 A changeset that has been rewritten in multiple different ways is called
3897 "divergent". Such changesets have multiple successor sets (each of which
3903 "divergent". Such changesets have multiple successor sets (each of which
3898 may also be split, i.e. have multiple successors).
3904 may also be split, i.e. have multiple successors).
3899
3905
3900 Results are displayed as follows::
3906 Results are displayed as follows::
3901
3907
3902 <rev1>
3908 <rev1>
3903 <successors-1A>
3909 <successors-1A>
3904 <rev2>
3910 <rev2>
3905 <successors-2A>
3911 <successors-2A>
3906 <successors-2B1> <successors-2B2> <successors-2B3>
3912 <successors-2B1> <successors-2B2> <successors-2B3>
3907
3913
3908 Here rev2 has two possible (i.e. divergent) successors sets. The first
3914 Here rev2 has two possible (i.e. divergent) successors sets. The first
3909 holds one element, whereas the second holds three (i.e. the changeset has
3915 holds one element, whereas the second holds three (i.e. the changeset has
3910 been split).
3916 been split).
3911 """
3917 """
3912 # passed to successorssets caching computation from one call to another
3918 # passed to successorssets caching computation from one call to another
3913 cache = {}
3919 cache = {}
3914 ctx2str = bytes
3920 ctx2str = bytes
3915 node2str = short
3921 node2str = short
3916 for rev in scmutil.revrange(repo, revs):
3922 for rev in scmutil.revrange(repo, revs):
3917 ctx = repo[rev]
3923 ctx = repo[rev]
3918 ui.write(b'%s\n' % ctx2str(ctx))
3924 ui.write(b'%s\n' % ctx2str(ctx))
3919 for succsset in obsutil.successorssets(
3925 for succsset in obsutil.successorssets(
3920 repo, ctx.node(), closest=opts['closest'], cache=cache
3926 repo, ctx.node(), closest=opts['closest'], cache=cache
3921 ):
3927 ):
3922 if succsset:
3928 if succsset:
3923 ui.write(b' ')
3929 ui.write(b' ')
3924 ui.write(node2str(succsset[0]))
3930 ui.write(node2str(succsset[0]))
3925 for node in succsset[1:]:
3931 for node in succsset[1:]:
3926 ui.write(b' ')
3932 ui.write(b' ')
3927 ui.write(node2str(node))
3933 ui.write(node2str(node))
3928 ui.write(b'\n')
3934 ui.write(b'\n')
3929
3935
3930
3936
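# Editor's illustration (not part of the changeset above): a hypothetical
# invocation of debugsuccessorssets. Each listed revision is followed by its
# indented successors sets, one set per line, as described in the docstring;
# the revset used here is only an example.
#
#   $ hg debugsuccessorssets --closest 'obsolete()'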
3931 @command(b'debugtagscache', [])
3937 @command(b'debugtagscache', [])
3932 def debugtagscache(ui, repo):
3938 def debugtagscache(ui, repo):
3933 """display the contents of .hg/cache/hgtagsfnodes1"""
3939 """display the contents of .hg/cache/hgtagsfnodes1"""
3934 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3940 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3935 flog = repo.file(b'.hgtags')
3941 flog = repo.file(b'.hgtags')
3936 for r in repo:
3942 for r in repo:
3937 node = repo[r].node()
3943 node = repo[r].node()
3938 tagsnode = cache.getfnode(node, computemissing=False)
3944 tagsnode = cache.getfnode(node, computemissing=False)
3939 if tagsnode:
3945 if tagsnode:
3940 tagsnodedisplay = hex(tagsnode)
3946 tagsnodedisplay = hex(tagsnode)
3941 if not flog.hasnode(tagsnode):
3947 if not flog.hasnode(tagsnode):
3942 tagsnodedisplay += b' (unknown node)'
3948 tagsnodedisplay += b' (unknown node)'
3943 elif tagsnode is None:
3949 elif tagsnode is None:
3944 tagsnodedisplay = b'missing'
3950 tagsnodedisplay = b'missing'
3945 else:
3951 else:
3946 tagsnodedisplay = b'invalid'
3952 tagsnodedisplay = b'invalid'
3947
3953
3948 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3954 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3949
3955
3950
3956
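# Editor's note (not part of the changeset above): each line emitted by
# debugtagscache has the form "<rev> <changeset hex> <fnode hex>"; per the
# code above, the last field may instead read "missing" (no cached entry),
# read "invalid", or carry an "(unknown node)" suffix when the cached node
# is absent from the .hgtags filelog.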
3951 @command(
3957 @command(
3952 b'debugtemplate',
3958 b'debugtemplate',
3953 [
3959 [
3954 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3960 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3955 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3961 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3956 ],
3962 ],
3957 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3963 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3958 optionalrepo=True,
3964 optionalrepo=True,
3959 )
3965 )
3960 def debugtemplate(ui, repo, tmpl, **opts):
3966 def debugtemplate(ui, repo, tmpl, **opts):
3961 """parse and apply a template
3967 """parse and apply a template
3962
3968
3963 If -r/--rev is given, the template is processed as a log template and
3969 If -r/--rev is given, the template is processed as a log template and
3964 applied to the given changesets. Otherwise, it is processed as a generic
3970 applied to the given changesets. Otherwise, it is processed as a generic
3965 template.
3971 template.
3966
3972
3967 Use --verbose to print the parsed tree.
3973 Use --verbose to print the parsed tree.
3968 """
3974 """
3969 revs = None
3975 revs = None
3970 if opts['rev']:
3976 if opts['rev']:
3971 if repo is None:
3977 if repo is None:
3972 raise error.RepoError(
3978 raise error.RepoError(
3973 _(b'there is no Mercurial repository here (.hg not found)')
3979 _(b'there is no Mercurial repository here (.hg not found)')
3974 )
3980 )
3975 revs = scmutil.revrange(repo, opts['rev'])
3981 revs = scmutil.revrange(repo, opts['rev'])
3976
3982
3977 props = {}
3983 props = {}
3978 for d in opts['define']:
3984 for d in opts['define']:
3979 try:
3985 try:
3980 k, v = (e.strip() for e in d.split(b'=', 1))
3986 k, v = (e.strip() for e in d.split(b'=', 1))
3981 if not k or k == b'ui':
3987 if not k or k == b'ui':
3982 raise ValueError
3988 raise ValueError
3983 props[k] = v
3989 props[k] = v
3984 except ValueError:
3990 except ValueError:
3985 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3991 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3986
3992
3987 if ui.verbose:
3993 if ui.verbose:
3988 aliases = ui.configitems(b'templatealias')
3994 aliases = ui.configitems(b'templatealias')
3989 tree = templater.parse(tmpl)
3995 tree = templater.parse(tmpl)
3990 ui.note(templater.prettyformat(tree), b'\n')
3996 ui.note(templater.prettyformat(tree), b'\n')
3991 newtree = templater.expandaliases(tree, aliases)
3997 newtree = templater.expandaliases(tree, aliases)
3992 if newtree != tree:
3998 if newtree != tree:
3993 ui.notenoi18n(
3999 ui.notenoi18n(
3994 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4000 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3995 )
4001 )
3996
4002
3997 if revs is None:
4003 if revs is None:
3998 tres = formatter.templateresources(ui, repo)
4004 tres = formatter.templateresources(ui, repo)
3999 t = formatter.maketemplater(ui, tmpl, resources=tres)
4005 t = formatter.maketemplater(ui, tmpl, resources=tres)
4000 if ui.verbose:
4006 if ui.verbose:
4001 kwds, funcs = t.symbolsuseddefault()
4007 kwds, funcs = t.symbolsuseddefault()
4002 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4008 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4003 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4009 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4004 ui.write(t.renderdefault(props))
4010 ui.write(t.renderdefault(props))
4005 else:
4011 else:
4006 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4012 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4007 if ui.verbose:
4013 if ui.verbose:
4008 kwds, funcs = displayer.t.symbolsuseddefault()
4014 kwds, funcs = displayer.t.symbolsuseddefault()
4009 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4015 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4010 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4016 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4011 for r in revs:
4017 for r in revs:
4012 displayer.show(repo[r], **pycompat.strkwargs(props))
4018 displayer.show(repo[r], **pycompat.strkwargs(props))
4013 displayer.close()
4019 displayer.close()
4014
4020
4015
4021
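# Editor's illustration (not part of the changeset above): hypothetical
# invocations of debugtemplate; the option names match the command table
# above, the template strings are arbitrary.
#
#   $ hg debugtemplate '{"hello"} {"world"}\n'             # generic template
#   $ hg debugtemplate -r . '{node|short} {desc|firstline}\n'
#   $ hg debugtemplate -D greeting=hi '{greeting}\n'       # with -D KEY=VALUE
#   $ hg debugtemplate -v -r . '{rev}\n'                   # also print parsed tree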
4016 @command(
4022 @command(
4017 b'debuguigetpass',
4023 b'debuguigetpass',
4018 [
4024 [
4019 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4025 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4020 ],
4026 ],
4021 _(b'[-p TEXT]'),
4027 _(b'[-p TEXT]'),
4022 norepo=True,
4028 norepo=True,
4023 )
4029 )
4024 def debuguigetpass(ui, prompt=b''):
4030 def debuguigetpass(ui, prompt=b''):
4025 """show prompt to type password"""
4031 """show prompt to type password"""
4026 r = ui.getpass(prompt)
4032 r = ui.getpass(prompt)
4027 if r is None:
4033 if r is None:
4028 r = b"<default response>"
4034 r = b"<default response>"
4029 ui.writenoi18n(b'response: %s\n' % r)
4035 ui.writenoi18n(b'response: %s\n' % r)
4030
4036
4031
4037
4032 @command(
4038 @command(
4033 b'debuguiprompt',
4039 b'debuguiprompt',
4034 [
4040 [
4035 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4041 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4036 ],
4042 ],
4037 _(b'[-p TEXT]'),
4043 _(b'[-p TEXT]'),
4038 norepo=True,
4044 norepo=True,
4039 )
4045 )
4040 def debuguiprompt(ui, prompt=b''):
4046 def debuguiprompt(ui, prompt=b''):
4041 """show plain prompt"""
4047 """show plain prompt"""
4042 r = ui.prompt(prompt)
4048 r = ui.prompt(prompt)
4043 ui.writenoi18n(b'response: %s\n' % r)
4049 ui.writenoi18n(b'response: %s\n' % r)
4044
4050
4045
4051
4046 @command(b'debugupdatecaches', [])
4052 @command(b'debugupdatecaches', [])
4047 def debugupdatecaches(ui, repo, *pats, **opts):
4053 def debugupdatecaches(ui, repo, *pats, **opts):
4048 """warm all known caches in the repository"""
4054 """warm all known caches in the repository"""
4049 with repo.wlock(), repo.lock():
4055 with repo.wlock(), repo.lock():
4050 repo.updatecaches(caches=repository.CACHES_ALL)
4056 repo.updatecaches(caches=repository.CACHES_ALL)
4051
4057
4052
4058
4053 @command(
4059 @command(
4054 b'debugupgraderepo',
4060 b'debugupgraderepo',
4055 [
4061 [
4056 (
4062 (
4057 b'o',
4063 b'o',
4058 b'optimize',
4064 b'optimize',
4059 [],
4065 [],
4060 _(b'extra optimization to perform'),
4066 _(b'extra optimization to perform'),
4061 _(b'NAME'),
4067 _(b'NAME'),
4062 ),
4068 ),
4063 (b'', b'run', False, _(b'performs an upgrade')),
4069 (b'', b'run', False, _(b'performs an upgrade')),
4064 (b'', b'backup', True, _(b'keep the old repository content around')),
4070 (b'', b'backup', True, _(b'keep the old repository content around')),
4065 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4071 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4066 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4072 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4067 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4073 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4068 ],
4074 ],
4069 )
4075 )
4070 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4076 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4071 """upgrade a repository to use different features
4077 """upgrade a repository to use different features
4072
4078
4073 If no arguments are specified, the repository is evaluated for upgrade
4079 If no arguments are specified, the repository is evaluated for upgrade
4074 and a list of problems and potential optimizations is printed.
4080 and a list of problems and potential optimizations is printed.
4075
4081
4076 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4082 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4077 can be influenced via additional arguments. More details will be provided
4083 can be influenced via additional arguments. More details will be provided
4078 by the command output when run without ``--run``.
4084 by the command output when run without ``--run``.
4079
4085
4080 During the upgrade, the repository will be locked and no writes will be
4086 During the upgrade, the repository will be locked and no writes will be
4081 allowed.
4087 allowed.
4082
4088
4083 At the end of the upgrade, the repository may not be readable while new
4089 At the end of the upgrade, the repository may not be readable while new
4084 repository data is swapped in. This window will be as long as it takes to
4090 repository data is swapped in. This window will be as long as it takes to
4085 rename some directories inside the ``.hg`` directory. On most machines, this
4091 rename some directories inside the ``.hg`` directory. On most machines, this
4086 should complete almost instantaneously and the chances of a consumer being
4092 should complete almost instantaneously and the chances of a consumer being
4087 unable to access the repository should be low.
4093 unable to access the repository should be low.
4088
4094
4089 By default, all revlogs will be upgraded. You can restrict this using flags
4095 By default, all revlogs will be upgraded. You can restrict this using flags
4090 such as `--manifest`:
4096 such as `--manifest`:
4091
4097
4092 * `--manifest`: only optimize the manifest
4098 * `--manifest`: only optimize the manifest
4093 * `--no-manifest`: optimize all revlogs but the manifest
4099 * `--no-manifest`: optimize all revlogs but the manifest
4094 * `--changelog`: optimize the changelog only
4100 * `--changelog`: optimize the changelog only
4095 * `--no-changelog --no-manifest`: optimize filelogs only
4101 * `--no-changelog --no-manifest`: optimize filelogs only
4096 * `--filelogs`: optimize the filelogs only
4102 * `--filelogs`: optimize the filelogs only
4097 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4103 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4098 """
4104 """
4099 return upgrade.upgraderepo(
4105 return upgrade.upgraderepo(
4100 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4106 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4101 )
4107 )
4102
4108
4103
4109
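# Editor's illustration (not part of the changeset above): hypothetical
# invocations of debugupgraderepo using the flags documented above.
#
#   $ hg debugupgraderepo                           # report only, nothing changes
#   $ hg debugupgraderepo --run                     # perform the upgrade
#   $ hg debugupgraderepo --run --no-backup         # do not keep the old content
#   $ hg debugupgraderepo --run --no-changelog --no-manifest   # filelogs only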
4104 @command(
4110 @command(
4105 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4111 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4106 )
4112 )
4107 def debugwalk(ui, repo, *pats, **opts):
4113 def debugwalk(ui, repo, *pats, **opts):
4108 """show how files match on given patterns"""
4114 """show how files match on given patterns"""
4109 opts = pycompat.byteskwargs(opts)
4115 opts = pycompat.byteskwargs(opts)
4110 m = scmutil.match(repo[None], pats, opts)
4116 m = scmutil.match(repo[None], pats, opts)
4111 if ui.verbose:
4117 if ui.verbose:
4112 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4118 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4113 items = list(repo[None].walk(m))
4119 items = list(repo[None].walk(m))
4114 if not items:
4120 if not items:
4115 return
4121 return
4116 f = lambda fn: fn
4122 f = lambda fn: fn
4117 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4123 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4118 f = lambda fn: util.normpath(fn)
4124 f = lambda fn: util.normpath(fn)
4119 fmt = b'f %%-%ds %%-%ds %%s' % (
4125 fmt = b'f %%-%ds %%-%ds %%s' % (
4120 max([len(abs) for abs in items]),
4126 max([len(abs) for abs in items]),
4121 max([len(repo.pathto(abs)) for abs in items]),
4127 max([len(repo.pathto(abs)) for abs in items]),
4122 )
4128 )
4123 for abs in items:
4129 for abs in items:
4124 line = fmt % (
4130 line = fmt % (
4125 abs,
4131 abs,
4126 f(repo.pathto(abs)),
4132 f(repo.pathto(abs)),
4127 m.exact(abs) and b'exact' or b'',
4133 m.exact(abs) and b'exact' or b'',
4128 )
4134 )
4129 ui.write(b"%s\n" % line.rstrip())
4135 ui.write(b"%s\n" % line.rstrip())
4130
4136
4131
4137
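# Editor's illustration (not part of the changeset above): a hypothetical
# invocation of debugwalk; -I/--include comes from cmdutil.walkopts and the
# paths are invented. Each output line shows the repo-relative path, the
# working-directory-relative path, and "exact" when the file was named
# literally on the command line.
#
#   $ hg debugwalk -I 'glob:**.py' doc/README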
4132 @command(b'debugwhyunstable', [], _(b'REV'))
4138 @command(b'debugwhyunstable', [], _(b'REV'))
4133 def debugwhyunstable(ui, repo, rev):
4139 def debugwhyunstable(ui, repo, rev):
4134 """explain instabilities of a changeset"""
4140 """explain instabilities of a changeset"""
4135 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4141 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4136 dnodes = b''
4142 dnodes = b''
4137 if entry.get(b'divergentnodes'):
4143 if entry.get(b'divergentnodes'):
4138 dnodes = (
4144 dnodes = (
4139 b' '.join(
4145 b' '.join(
4140 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4146 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4141 for ctx in entry[b'divergentnodes']
4147 for ctx in entry[b'divergentnodes']
4142 )
4148 )
4143 + b' '
4149 + b' '
4144 )
4150 )
4145 ui.write(
4151 ui.write(
4146 b'%s: %s%s %s\n'
4152 b'%s: %s%s %s\n'
4147 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4153 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4148 )
4154 )
4149
4155
4150
4156
4151 @command(
4157 @command(
4152 b'debugwireargs',
4158 b'debugwireargs',
4153 [
4159 [
4154 (b'', b'three', b'', b'three'),
4160 (b'', b'three', b'', b'three'),
4155 (b'', b'four', b'', b'four'),
4161 (b'', b'four', b'', b'four'),
4156 (b'', b'five', b'', b'five'),
4162 (b'', b'five', b'', b'five'),
4157 ]
4163 ]
4158 + cmdutil.remoteopts,
4164 + cmdutil.remoteopts,
4159 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4165 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4160 norepo=True,
4166 norepo=True,
4161 )
4167 )
4162 def debugwireargs(ui, repopath, *vals, **opts):
4168 def debugwireargs(ui, repopath, *vals, **opts):
4163 opts = pycompat.byteskwargs(opts)
4169 opts = pycompat.byteskwargs(opts)
4164 repo = hg.peer(ui, opts, repopath)
4170 repo = hg.peer(ui, opts, repopath)
4165 try:
4171 try:
4166 for opt in cmdutil.remoteopts:
4172 for opt in cmdutil.remoteopts:
4167 del opts[opt[1]]
4173 del opts[opt[1]]
4168 args = {}
4174 args = {}
4169 for k, v in pycompat.iteritems(opts):
4175 for k, v in pycompat.iteritems(opts):
4170 if v:
4176 if v:
4171 args[k] = v
4177 args[k] = v
4172 args = pycompat.strkwargs(args)
4178 args = pycompat.strkwargs(args)
4173 # run twice to check that we don't mess up the stream for the next command
4179 # run twice to check that we don't mess up the stream for the next command
4174 res1 = repo.debugwireargs(*vals, **args)
4180 res1 = repo.debugwireargs(*vals, **args)
4175 res2 = repo.debugwireargs(*vals, **args)
4181 res2 = repo.debugwireargs(*vals, **args)
4176 ui.write(b"%s\n" % res1)
4182 ui.write(b"%s\n" % res1)
4177 if res1 != res2:
4183 if res1 != res2:
4178 ui.warn(b"%s\n" % res2)
4184 ui.warn(b"%s\n" % res2)
4179 finally:
4185 finally:
4180 repo.close()
4186 repo.close()
4181
4187
4182
4188
4183 def _parsewirelangblocks(fh):
4189 def _parsewirelangblocks(fh):
4184 activeaction = None
4190 activeaction = None
4185 blocklines = []
4191 blocklines = []
4186 lastindent = 0
4192 lastindent = 0
4187
4193
4188 for line in fh:
4194 for line in fh:
4189 line = line.rstrip()
4195 line = line.rstrip()
4190 if not line:
4196 if not line:
4191 continue
4197 continue
4192
4198
4193 if line.startswith(b'#'):
4199 if line.startswith(b'#'):
4194 continue
4200 continue
4195
4201
4196 if not line.startswith(b' '):
4202 if not line.startswith(b' '):
4197 # New block. Flush previous one.
4203 # New block. Flush previous one.
4198 if activeaction:
4204 if activeaction:
4199 yield activeaction, blocklines
4205 yield activeaction, blocklines
4200
4206
4201 activeaction = line
4207 activeaction = line
4202 blocklines = []
4208 blocklines = []
4203 lastindent = 0
4209 lastindent = 0
4204 continue
4210 continue
4205
4211
4206 # Else we start with an indent.
4212 # Else we start with an indent.
4207
4213
4208 if not activeaction:
4214 if not activeaction:
4209 raise error.Abort(_(b'indented line outside of block'))
4215 raise error.Abort(_(b'indented line outside of block'))
4210
4216
4211 indent = len(line) - len(line.lstrip())
4217 indent = len(line) - len(line.lstrip())
4212
4218
4213 # If this line is indented more than the last line, concatenate it.
4219 # If this line is indented more than the last line, concatenate it.
4214 if indent > lastindent and blocklines:
4220 if indent > lastindent and blocklines:
4215 blocklines[-1] += line.lstrip()
4221 blocklines[-1] += line.lstrip()
4216 else:
4222 else:
4217 blocklines.append(line)
4223 blocklines.append(line)
4218 lastindent = indent
4224 lastindent = indent
4219
4225
4220 # Flush last block.
4226 # Flush last block.
4221 if activeaction:
4227 if activeaction:
4222 yield activeaction, blocklines
4228 yield activeaction, blocklines
4223
4229
4224
4230
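# Editor's sketch (not part of the changeset above): a minimal, hypothetical
# helper showing how _parsewirelangblocks groups a wire-language script into
# (action, lines) pairs. The function name and sample script are invented
# for illustration only.
def _example_parse_wire_script():
    import io

    script = b'command listkeys\n    namespace bookmarks\nclose\n'
    # Expected result:
    #   [(b'command listkeys', [b'    namespace bookmarks']),
    #    (b'close', [])]
    return list(_parsewirelangblocks(io.BytesIO(script)))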
4225 @command(
4231 @command(
4226 b'debugwireproto',
4232 b'debugwireproto',
4227 [
4233 [
4228 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4234 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4235 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 (
4236 (
4231 b'',
4237 b'',
4232 b'noreadstderr',
4238 b'noreadstderr',
4233 False,
4239 False,
4234 _(b'do not read from stderr of the remote'),
4240 _(b'do not read from stderr of the remote'),
4235 ),
4241 ),
4236 (
4242 (
4237 b'',
4243 b'',
4238 b'nologhandshake',
4244 b'nologhandshake',
4239 False,
4245 False,
4240 _(b'do not log I/O related to the peer handshake'),
4246 _(b'do not log I/O related to the peer handshake'),
4241 ),
4247 ),
4242 ]
4248 ]
4243 + cmdutil.remoteopts,
4249 + cmdutil.remoteopts,
4244 _(b'[PATH]'),
4250 _(b'[PATH]'),
4245 optionalrepo=True,
4251 optionalrepo=True,
4246 )
4252 )
4247 def debugwireproto(ui, repo, path=None, **opts):
4253 def debugwireproto(ui, repo, path=None, **opts):
4248 """send wire protocol commands to a server
4254 """send wire protocol commands to a server
4249
4255
4250 This command can be used to issue wire protocol commands to remote
4256 This command can be used to issue wire protocol commands to remote
4251 peers and to debug the raw data being exchanged.
4257 peers and to debug the raw data being exchanged.
4252
4258
4253 ``--localssh`` will start an SSH server against the current repository
4259 ``--localssh`` will start an SSH server against the current repository
4254 and connect to that. By default, the connection will perform a handshake
4260 and connect to that. By default, the connection will perform a handshake
4255 and establish an appropriate peer instance.
4261 and establish an appropriate peer instance.
4256
4262
4257 ``--peer`` can be used to bypass the handshake protocol and construct a
4263 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 peer instance using the specified class type. Valid values are ``raw``,
4264 peer instance using the specified class type. Valid values are ``raw``,
4259 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4265 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 raw data payloads and don't support higher-level command actions.
4266 raw data payloads and don't support higher-level command actions.
4261
4267
4262 ``--noreadstderr`` can be used to disable automatic reading from stderr
4268 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 of the peer (for SSH connections only). Disabling automatic reading of
4269 of the peer (for SSH connections only). Disabling automatic reading of
4264 stderr is useful for making output more deterministic.
4270 stderr is useful for making output more deterministic.
4265
4271
4266 Commands are issued via a mini language which is specified via stdin.
4272 Commands are issued via a mini language which is specified via stdin.
4267 The language consists of individual actions to perform. An action is
4273 The language consists of individual actions to perform. An action is
4268 defined by a block. A block is defined as a line with no leading
4274 defined by a block. A block is defined as a line with no leading
4269 space followed by 0 or more lines with leading space. Blocks are
4275 space followed by 0 or more lines with leading space. Blocks are
4270 effectively a high-level command with additional metadata.
4276 effectively a high-level command with additional metadata.
4271
4277
4272 Lines beginning with ``#`` are ignored.
4278 Lines beginning with ``#`` are ignored.
4273
4279
4274 The following sections denote available actions.
4280 The following sections denote available actions.
4275
4281
4276 raw
4282 raw
4277 ---
4283 ---
4278
4284
4279 Send raw data to the server.
4285 Send raw data to the server.
4280
4286
4281 The block payload contains the raw data to send as one atomic send
4287 The block payload contains the raw data to send as one atomic send
4282 operation. The data may not actually be delivered in a single system
4288 operation. The data may not actually be delivered in a single system
4283 call: it depends on the abilities of the transport being used.
4289 call: it depends on the abilities of the transport being used.
4284
4290
4285 Each line in the block is de-indented and concatenated. Then, that
4291 Each line in the block is de-indented and concatenated. Then, that
4286 value is evaluated as a Python b'' literal. This allows the use of
4292 value is evaluated as a Python b'' literal. This allows the use of
4287 backslash escaping, etc.
4293 backslash escaping, etc.
4288
4294
4289 raw+
4295 raw+
4290 ----
4296 ----
4291
4297
4292 Behaves like ``raw`` except flushes output afterwards.
4298 Behaves like ``raw`` except flushes output afterwards.
4293
4299
4294 command <X>
4300 command <X>
4295 -----------
4301 -----------
4296
4302
4297 Send a request to run a named command, whose name follows the ``command``
4303 Send a request to run a named command, whose name follows the ``command``
4298 string.
4304 string.
4299
4305
4300 Arguments to the command are defined as lines in this block. The format of
4306 Arguments to the command are defined as lines in this block. The format of
4301 each line is ``<key> <value>``. e.g.::
4307 each line is ``<key> <value>``. e.g.::
4302
4308
4303 command listkeys
4309 command listkeys
4304 namespace bookmarks
4310 namespace bookmarks
4305
4311
4306 If the value begins with ``eval:``, it will be interpreted as a Python
4312 If the value begins with ``eval:``, it will be interpreted as a Python
4307 literal expression. Otherwise values are interpreted as Python b'' literals.
4313 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 This allows sending complex types and encoding special byte sequences via
4314 This allows sending complex types and encoding special byte sequences via
4309 backslash escaping.
4315 backslash escaping.
4310
4316
4311 The following arguments have special meaning:
4317 The following arguments have special meaning:
4312
4318
4313 ``PUSHFILE``
4319 ``PUSHFILE``
4314 When defined, the *push* mechanism of the peer will be used instead
4320 When defined, the *push* mechanism of the peer will be used instead
4315 of the static request-response mechanism and the content of the
4321 of the static request-response mechanism and the content of the
4316 file specified in the value of this argument will be sent as the
4322 file specified in the value of this argument will be sent as the
4317 command payload.
4323 command payload.
4318
4324
4319 This can be used to submit a local bundle file to the remote.
4325 This can be used to submit a local bundle file to the remote.
4320
4326
4321 batchbegin
4327 batchbegin
4322 ----------
4328 ----------
4323
4329
4324 Instruct the peer to begin a batched send.
4330 Instruct the peer to begin a batched send.
4325
4331
4326 All ``command`` blocks are queued for execution until the next
4332 All ``command`` blocks are queued for execution until the next
4327 ``batchsubmit`` block.
4333 ``batchsubmit`` block.
4328
4334
4329 batchsubmit
4335 batchsubmit
4330 -----------
4336 -----------
4331
4337
4332 Submit previously queued ``command`` blocks as a batch request.
4338 Submit previously queued ``command`` blocks as a batch request.
4333
4339
4334 This action MUST be paired with a ``batchbegin`` action.
4340 This action MUST be paired with a ``batchbegin`` action.
4335
4341
4336 httprequest <method> <path>
4342 httprequest <method> <path>
4337 ---------------------------
4343 ---------------------------
4338
4344
4339 (HTTP peer only)
4345 (HTTP peer only)
4340
4346
4341 Send an HTTP request to the peer.
4347 Send an HTTP request to the peer.
4342
4348
4343 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4349 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344
4350
4345 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4351 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 headers to add to the request. e.g. ``Accept: foo``.
4352 headers to add to the request. e.g. ``Accept: foo``.
4347
4353
4348 The following arguments are special:
4354 The following arguments are special:
4349
4355
4350 ``BODYFILE``
4356 ``BODYFILE``
4351 The content of the file defined as the value to this argument will be
4357 The content of the file defined as the value to this argument will be
4352 transferred verbatim as the HTTP request body.
4358 transferred verbatim as the HTTP request body.
4353
4359
4354 ``frame <type> <flags> <payload>``
4360 ``frame <type> <flags> <payload>``
4355 Send a unified protocol frame as part of the request body.
4361 Send a unified protocol frame as part of the request body.
4356
4362
4357 All frames will be collected and sent as the body to the HTTP
4363 All frames will be collected and sent as the body to the HTTP
4358 request.
4364 request.
4359
4365
4360 close
4366 close
4361 -----
4367 -----
4362
4368
4363 Close the connection to the server.
4369 Close the connection to the server.
4364
4370
4365 flush
4371 flush
4366 -----
4372 -----
4367
4373
4368 Flush data written to the server.
4374 Flush data written to the server.
4369
4375
4370 readavailable
4376 readavailable
4371 -------------
4377 -------------
4372
4378
4373 Close the write end of the connection and read all available data from
4379 Close the write end of the connection and read all available data from
4374 the server.
4380 the server.
4375
4381
4376 If the connection to the server encompasses multiple pipes, we poll both
4382 If the connection to the server encompasses multiple pipes, we poll both
4377 pipes and read available data.
4383 pipes and read available data.
4378
4384
4379 readline
4385 readline
4380 --------
4386 --------
4381
4387
4382 Read a line of output from the server. If there are multiple output
4388 Read a line of output from the server. If there are multiple output
4383 pipes, reads only the main pipe.
4389 pipes, reads only the main pipe.
4384
4390
4385 ereadline
4391 ereadline
4386 ---------
4392 ---------
4387
4393
4388 Like ``readline``, but read from the stderr pipe, if available.
4394 Like ``readline``, but read from the stderr pipe, if available.
4389
4395
4390 read <X>
4396 read <X>
4391 --------
4397 --------
4392
4398
4393 ``read()`` <X> bytes from the server's main output pipe.
4399 ``read()`` <X> bytes from the server's main output pipe.
4394
4400
4395 eread <X>
4401 eread <X>
4396 ---------
4402 ---------
4397
4403
4398 ``read()`` <X> bytes from the server's stderr pipe, if available.
4404 ``read()`` <X> bytes from the server's stderr pipe, if available.
4399
4405
4400 Specifying Unified Frame-Based Protocol Frames
4406 Specifying Unified Frame-Based Protocol Frames
4401 ----------------------------------------------
4407 ----------------------------------------------
4402
4408
4403 It is possible to emit a *Unified Frame-Based Protocol* by using special
4409 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 syntax.
4410 syntax.
4405
4411
4406 A frame is composed of a type, flags, and a payload. These can be parsed
4412 A frame is composed of a type, flags, and a payload. These can be parsed
4407 from a string of the form:
4413 from a string of the form:
4408
4414
4409 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4415 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410
4416
4411 ``request-id`` and ``stream-id`` are integers defining the request and
4417 ``request-id`` and ``stream-id`` are integers defining the request and
4412 stream identifiers.
4418 stream identifiers.
4413
4419
4414 ``type`` can be an integer value for the frame type or the string name
4420 ``type`` can be an integer value for the frame type or the string name
4415 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4421 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 ``command-name``.
4422 ``command-name``.
4417
4423
4418 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4424 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 components. Each component (and there can be just one) can be an integer
4425 components. Each component (and there can be just one) can be an integer
4420 or a flag name for stream flags or frame flags, respectively. Values are
4426 or a flag name for stream flags or frame flags, respectively. Values are
4421 resolved to integers and then bitwise OR'd together.
4427 resolved to integers and then bitwise OR'd together.
4422
4428
4423 ``payload`` represents the raw frame payload. If it begins with
4429 ``payload`` represents the raw frame payload. If it begins with
4424 ``cbor:``, the following string is evaluated as Python code and the
4430 ``cbor:``, the following string is evaluated as Python code and the
4425 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4431 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 as a Python byte string literal.
4432 as a Python byte string literal.
4427 """
4433 """
4428 opts = pycompat.byteskwargs(opts)
4434 opts = pycompat.byteskwargs(opts)
4429
4435
4430 if opts[b'localssh'] and not repo:
4436 if opts[b'localssh'] and not repo:
4431 raise error.Abort(_(b'--localssh requires a repository'))
4437 raise error.Abort(_(b'--localssh requires a repository'))
4432
4438
4433 if opts[b'peer'] and opts[b'peer'] not in (
4439 if opts[b'peer'] and opts[b'peer'] not in (
4434 b'raw',
4440 b'raw',
4435 b'http2',
4441 b'http2',
4436 b'ssh1',
4442 b'ssh1',
4437 b'ssh2',
4443 b'ssh2',
4438 ):
4444 ):
4439 raise error.Abort(
4445 raise error.Abort(
4440 _(b'invalid value for --peer'),
4446 _(b'invalid value for --peer'),
4441 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4447 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4442 )
4448 )
4443
4449
4444 if path and opts[b'localssh']:
4450 if path and opts[b'localssh']:
4445 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4451 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446
4452
4447 if ui.interactive():
4453 if ui.interactive():
4448 ui.write(_(b'(waiting for commands on stdin)\n'))
4454 ui.write(_(b'(waiting for commands on stdin)\n'))
4449
4455
4450 blocks = list(_parsewirelangblocks(ui.fin))
4456 blocks = list(_parsewirelangblocks(ui.fin))
4451
4457
4452 proc = None
4458 proc = None
4453 stdin = None
4459 stdin = None
4454 stdout = None
4460 stdout = None
4455 stderr = None
4461 stderr = None
4456 opener = None
4462 opener = None
4457
4463
4458 if opts[b'localssh']:
4464 if opts[b'localssh']:
4459 # We start the SSH server in its own process so there is process
4465 # We start the SSH server in its own process so there is process
4460 # separation. This prevents a whole class of potential bugs around
4466 # separation. This prevents a whole class of potential bugs around
4461 # shared state from interfering with server operation.
4467 # shared state from interfering with server operation.
4462 args = procutil.hgcmd() + [
4468 args = procutil.hgcmd() + [
4463 b'-R',
4469 b'-R',
4464 repo.root,
4470 repo.root,
4465 b'debugserve',
4471 b'debugserve',
4466 b'--sshstdio',
4472 b'--sshstdio',
4467 ]
4473 ]
4468 proc = subprocess.Popen(
4474 proc = subprocess.Popen(
4469 pycompat.rapply(procutil.tonativestr, args),
4475 pycompat.rapply(procutil.tonativestr, args),
4470 stdin=subprocess.PIPE,
4476 stdin=subprocess.PIPE,
4471 stdout=subprocess.PIPE,
4477 stdout=subprocess.PIPE,
4472 stderr=subprocess.PIPE,
4478 stderr=subprocess.PIPE,
4473 bufsize=0,
4479 bufsize=0,
4474 )
4480 )
4475
4481
4476 stdin = proc.stdin
4482 stdin = proc.stdin
4477 stdout = proc.stdout
4483 stdout = proc.stdout
4478 stderr = proc.stderr
4484 stderr = proc.stderr
4479
4485
4480 # We turn the pipes into observers so we can log I/O.
4486 # We turn the pipes into observers so we can log I/O.
4481 if ui.verbose or opts[b'peer'] == b'raw':
4487 if ui.verbose or opts[b'peer'] == b'raw':
4482 stdin = util.makeloggingfileobject(
4488 stdin = util.makeloggingfileobject(
4483 ui, proc.stdin, b'i', logdata=True
4489 ui, proc.stdin, b'i', logdata=True
4484 )
4490 )
4485 stdout = util.makeloggingfileobject(
4491 stdout = util.makeloggingfileobject(
4486 ui, proc.stdout, b'o', logdata=True
4492 ui, proc.stdout, b'o', logdata=True
4487 )
4493 )
4488 stderr = util.makeloggingfileobject(
4494 stderr = util.makeloggingfileobject(
4489 ui, proc.stderr, b'e', logdata=True
4495 ui, proc.stderr, b'e', logdata=True
4490 )
4496 )
4491
4497
4492 # --localssh also implies the peer connection settings.
4498 # --localssh also implies the peer connection settings.
4493
4499
4494 url = b'ssh://localserver'
4500 url = b'ssh://localserver'
4495 autoreadstderr = not opts[b'noreadstderr']
4501 autoreadstderr = not opts[b'noreadstderr']
4496
4502
4497 if opts[b'peer'] == b'ssh1':
4503 if opts[b'peer'] == b'ssh1':
4498 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4504 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 peer = sshpeer.sshv1peer(
4505 peer = sshpeer.sshv1peer(
4500 ui,
4506 ui,
4501 url,
4507 url,
4502 proc,
4508 proc,
4503 stdin,
4509 stdin,
4504 stdout,
4510 stdout,
4505 stderr,
4511 stderr,
4506 None,
4512 None,
4507 autoreadstderr=autoreadstderr,
4513 autoreadstderr=autoreadstderr,
4508 )
4514 )
4509 elif opts[b'peer'] == b'ssh2':
4515 elif opts[b'peer'] == b'ssh2':
4510 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4516 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 peer = sshpeer.sshv2peer(
4517 peer = sshpeer.sshv2peer(
4512 ui,
4518 ui,
4513 url,
4519 url,
4514 proc,
4520 proc,
4515 stdin,
4521 stdin,
4516 stdout,
4522 stdout,
4517 stderr,
4523 stderr,
4518 None,
4524 None,
4519 autoreadstderr=autoreadstderr,
4525 autoreadstderr=autoreadstderr,
4520 )
4526 )
4521 elif opts[b'peer'] == b'raw':
4527 elif opts[b'peer'] == b'raw':
4522 ui.write(_(b'using raw connection to peer\n'))
4528 ui.write(_(b'using raw connection to peer\n'))
4523 peer = None
4529 peer = None
4524 else:
4530 else:
4525 ui.write(_(b'creating ssh peer from handshake results\n'))
4531 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 peer = sshpeer.makepeer(
4532 peer = sshpeer.makepeer(
4527 ui,
4533 ui,
4528 url,
4534 url,
4529 proc,
4535 proc,
4530 stdin,
4536 stdin,
4531 stdout,
4537 stdout,
4532 stderr,
4538 stderr,
4533 autoreadstderr=autoreadstderr,
4539 autoreadstderr=autoreadstderr,
4534 )
4540 )
4535
4541
4536 elif path:
4542 elif path:
4537 # We bypass hg.peer() so we can proxy the sockets.
4543 # We bypass hg.peer() so we can proxy the sockets.
4538 # TODO consider not doing this because we skip
4544 # TODO consider not doing this because we skip
4539 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4545 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 u = urlutil.url(path)
4546 u = urlutil.url(path)
4541 if u.scheme != b'http':
4547 if u.scheme != b'http':
4542 raise error.Abort(_(b'only http:// paths are currently supported'))
4548 raise error.Abort(_(b'only http:// paths are currently supported'))
4543
4549
4544 url, authinfo = u.authinfo()
4550 url, authinfo = u.authinfo()
4545 openerargs = {
4551 openerargs = {
4546 'useragent': b'Mercurial debugwireproto',
4552 'useragent': b'Mercurial debugwireproto',
4547 }
4553 }
4548
4554
4549 # Turn pipes/sockets into observers so we can log I/O.
4555 # Turn pipes/sockets into observers so we can log I/O.
4550 if ui.verbose:
4556 if ui.verbose:
4551 openerargs.update(
4557 openerargs.update(
4552 {
4558 {
4553 'loggingfh': ui,
4559 'loggingfh': ui,
4554 'loggingname': b's',
4560 'loggingname': b's',
4555 'loggingopts': {
4561 'loggingopts': {
4556 'logdata': True,
4562 'logdata': True,
4557 'logdataapis': False,
4563 'logdataapis': False,
4558 },
4564 },
4559 }
4565 }
4560 )
4566 )
4561
4567
4562 if ui.debugflag:
4568 if ui.debugflag:
4563 openerargs['loggingopts']['logdataapis'] = True
4569 openerargs['loggingopts']['logdataapis'] = True
4564
4570
4565 # Don't send default headers when in raw mode. This allows us to
4571 # Don't send default headers when in raw mode. This allows us to
4566 # bypass most of the behavior of our URL handling code so we can
4572 # bypass most of the behavior of our URL handling code so we can
4567 # have near complete control over what's sent on the wire.
4573 # have near complete control over what's sent on the wire.
4568 if opts[b'peer'] == b'raw':
4574 if opts[b'peer'] == b'raw':
4569 openerargs['sendaccept'] = False
4575 openerargs['sendaccept'] = False
4570
4576
4571 opener = urlmod.opener(ui, authinfo, **openerargs)
4577 opener = urlmod.opener(ui, authinfo, **openerargs)
4572
4578
4573 if opts[b'peer'] == b'http2':
4579 if opts[b'peer'] == b'http2':
4574 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4580 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 # We go through makepeer() because we need an API descriptor for
4581 # We go through makepeer() because we need an API descriptor for
4576 # the peer instance to be useful.
4582 # the peer instance to be useful.
4577 maybe_silent = (
4583 maybe_silent = (
4578 ui.silent()
4584 ui.silent()
4579 if opts[b'nologhandshake']
4585 if opts[b'nologhandshake']
4580 else util.nullcontextmanager()
4586 else util.nullcontextmanager()
4581 )
4587 )
4582 with maybe_silent, ui.configoverride(
4588 with maybe_silent, ui.configoverride(
4583 {(b'experimental', b'httppeer.advertise-v2'): True}
4589 {(b'experimental', b'httppeer.advertise-v2'): True}
4584 ):
4590 ):
4585 peer = httppeer.makepeer(ui, path, opener=opener)
4591 peer = httppeer.makepeer(ui, path, opener=opener)
4586
4592
4587 if not isinstance(peer, httppeer.httpv2peer):
4593 if not isinstance(peer, httppeer.httpv2peer):
4588 raise error.Abort(
4594 raise error.Abort(
4589 _(
4595 _(
4590 b'could not instantiate HTTP peer for '
4596 b'could not instantiate HTTP peer for '
4591 b'wire protocol version 2'
4597 b'wire protocol version 2'
4592 ),
4598 ),
4593 hint=_(
4599 hint=_(
4594 b'the server may not have the feature '
4600 b'the server may not have the feature '
4595 b'enabled or is not allowing this '
4601 b'enabled or is not allowing this '
4596 b'client version'
4602 b'client version'
4597 ),
4603 ),
4598 )
4604 )
4599
4605
4600 elif opts[b'peer'] == b'raw':
4606 elif opts[b'peer'] == b'raw':
4601 ui.write(_(b'using raw connection to peer\n'))
4607 ui.write(_(b'using raw connection to peer\n'))
4602 peer = None
4608 peer = None
4603 elif opts[b'peer']:
4609 elif opts[b'peer']:
4604 raise error.Abort(
4610 raise error.Abort(
4605 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4611 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4606 )
4612 )
4607 else:
4613 else:
4608 peer = httppeer.makepeer(ui, path, opener=opener)
4614 peer = httppeer.makepeer(ui, path, opener=opener)
4609
4615
4610 # We /could/ populate stdin/stdout with sock.makefile()...
4616 # We /could/ populate stdin/stdout with sock.makefile()...
4611 else:
4617 else:
4612 raise error.Abort(_(b'unsupported connection configuration'))
4618 raise error.Abort(_(b'unsupported connection configuration'))
4613
4619
4614 batchedcommands = None
4620 batchedcommands = None
4615
4621
4616 # Now perform actions based on the parsed wire language instructions.
4622 # Now perform actions based on the parsed wire language instructions.
4617 for action, lines in blocks:
4623 for action, lines in blocks:
4618 if action in (b'raw', b'raw+'):
4624 if action in (b'raw', b'raw+'):
4619 if not stdin:
4625 if not stdin:
4620 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4626 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4621
4627
4622 # Concatenate the data together.
4628 # Concatenate the data together.
4623 data = b''.join(l.lstrip() for l in lines)
4629 data = b''.join(l.lstrip() for l in lines)
4624 data = stringutil.unescapestr(data)
4630 data = stringutil.unescapestr(data)
4625 stdin.write(data)
4631 stdin.write(data)
4626
4632
4627 if action == b'raw+':
4633 if action == b'raw+':
4628 stdin.flush()
4634 stdin.flush()
4629 elif action == b'flush':
4635 elif action == b'flush':
4630 if not stdin:
4636 if not stdin:
4631 raise error.Abort(_(b'cannot call flush on this peer'))
4637 raise error.Abort(_(b'cannot call flush on this peer'))
4632 stdin.flush()
4638 stdin.flush()
4633 elif action.startswith(b'command'):
4639 elif action.startswith(b'command'):
4634 if not peer:
4640 if not peer:
4635 raise error.Abort(
4641 raise error.Abort(
4636 _(
4642 _(
4637 b'cannot send commands unless peer instance '
4643 b'cannot send commands unless peer instance '
4638 b'is available'
4644 b'is available'
4639 )
4645 )
4640 )
4646 )
4641
4647
4642 command = action.split(b' ', 1)[1]
4648 command = action.split(b' ', 1)[1]
4643
4649
4644 args = {}
4650 args = {}
4645 for line in lines:
4651 for line in lines:
4646 # We need to allow empty values.
4652 # We need to allow empty values.
4647 fields = line.lstrip().split(b' ', 1)
4653 fields = line.lstrip().split(b' ', 1)
4648 if len(fields) == 1:
4654 if len(fields) == 1:
4649 key = fields[0]
4655 key = fields[0]
4650 value = b''
4656 value = b''
4651 else:
4657 else:
4652 key, value = fields
4658 key, value = fields
4653
4659
4654 if value.startswith(b'eval:'):
4660 if value.startswith(b'eval:'):
4655 value = stringutil.evalpythonliteral(value[5:])
4661 value = stringutil.evalpythonliteral(value[5:])
4656 else:
4662 else:
4657 value = stringutil.unescapestr(value)
4663 value = stringutil.unescapestr(value)
4658
4664
4659 args[key] = value
4665 args[key] = value
4660
4666
4661 if batchedcommands is not None:
4667 if batchedcommands is not None:
4662 batchedcommands.append((command, args))
4668 batchedcommands.append((command, args))
4663 continue
4669 continue
4664
4670
4665 ui.status(_(b'sending %s command\n') % command)
4671 ui.status(_(b'sending %s command\n') % command)
4666
4672
4667 if b'PUSHFILE' in args:
4673 if b'PUSHFILE' in args:
4668 with open(args[b'PUSHFILE'], 'rb') as fh:
4674 with open(args[b'PUSHFILE'], 'rb') as fh:
4669 del args[b'PUSHFILE']
4675 del args[b'PUSHFILE']
4670 res, output = peer._callpush(
4676 res, output = peer._callpush(
4671 command, fh, **pycompat.strkwargs(args)
4677 command, fh, **pycompat.strkwargs(args)
4672 )
4678 )
4673 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4679 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4674 ui.status(
4680 ui.status(
4675 _(b'remote output: %s\n') % stringutil.escapestr(output)
4681 _(b'remote output: %s\n') % stringutil.escapestr(output)
4676 )
4682 )
4677 else:
4683 else:
4678 with peer.commandexecutor() as e:
4684 with peer.commandexecutor() as e:
4679 res = e.callcommand(command, args).result()
4685 res = e.callcommand(command, args).result()
4680
4686
4681 if isinstance(res, wireprotov2peer.commandresponse):
4687 if isinstance(res, wireprotov2peer.commandresponse):
4682 val = res.objects()
4688 val = res.objects()
4683 ui.status(
4689 ui.status(
4684 _(b'response: %s\n')
4690 _(b'response: %s\n')
4685 % stringutil.pprint(val, bprefix=True, indent=2)
4691 % stringutil.pprint(val, bprefix=True, indent=2)
4686 )
4692 )
4687 else:
4693 else:
4688 ui.status(
4694 ui.status(
4689 _(b'response: %s\n')
4695 _(b'response: %s\n')
4690 % stringutil.pprint(res, bprefix=True, indent=2)
4696 % stringutil.pprint(res, bprefix=True, indent=2)
4691 )
4697 )
4692
4698
4693 elif action == b'batchbegin':
4699 elif action == b'batchbegin':
4694 if batchedcommands is not None:
4700 if batchedcommands is not None:
4695 raise error.Abort(_(b'nested batchbegin not allowed'))
4701 raise error.Abort(_(b'nested batchbegin not allowed'))
4696
4702
4697 batchedcommands = []
4703 batchedcommands = []
4698 elif action == b'batchsubmit':
4704 elif action == b'batchsubmit':
4699 # There is a batching API we could go through. But it would be
4705 # There is a batching API we could go through. But it would be
4700 # difficult to normalize requests into function calls. It is easier
4706 # difficult to normalize requests into function calls. It is easier
4701 # to bypass this layer and normalize to commands + args.
4707 # to bypass this layer and normalize to commands + args.
4702 ui.status(
4708 ui.status(
4703 _(b'sending batch with %d sub-commands\n')
4709 _(b'sending batch with %d sub-commands\n')
4704 % len(batchedcommands)
4710 % len(batchedcommands)
4705 )
4711 )
4706 assert peer is not None
4712 assert peer is not None
4707 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4713 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4708 ui.status(
4714 ui.status(
4709 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4715 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4710 )
4716 )
4711
4717
4712 batchedcommands = None
4718 batchedcommands = None
4713
4719
4714 elif action.startswith(b'httprequest '):
4720 elif action.startswith(b'httprequest '):
4715 if not opener:
4721 if not opener:
4716 raise error.Abort(
4722 raise error.Abort(
4717 _(b'cannot use httprequest without an HTTP peer')
4723 _(b'cannot use httprequest without an HTTP peer')
4718 )
4724 )
4719
4725
4720 request = action.split(b' ', 2)
4726 request = action.split(b' ', 2)
4721 if len(request) != 3:
4727 if len(request) != 3:
4722 raise error.Abort(
4728 raise error.Abort(
4723 _(
4729 _(
4724 b'invalid httprequest: expected format is '
4730 b'invalid httprequest: expected format is '
4725 b'"httprequest <method> <path>"'
4731 b'"httprequest <method> <path>"'
4726 )
4732 )
4727 )
4733 )
4728
4734
4729 method, httppath = request[1:]
4735 method, httppath = request[1:]
4730 headers = {}
4736 headers = {}
4731 body = None
4737 body = None
4732 frames = []
4738 frames = []
4733 for line in lines:
4739 for line in lines:
4734 line = line.lstrip()
4740 line = line.lstrip()
4735 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4741 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4736 if m:
4742 if m:
4737 # Headers need to use native strings.
4743 # Headers need to use native strings.
4738 key = pycompat.strurl(m.group(1))
4744 key = pycompat.strurl(m.group(1))
4739 value = pycompat.strurl(m.group(2))
4745 value = pycompat.strurl(m.group(2))
4740 headers[key] = value
4746 headers[key] = value
4741 continue
4747 continue
4742
4748
4743 if line.startswith(b'BODYFILE '):
4749 if line.startswith(b'BODYFILE '):
4744 with open(line.split(b' ', 1)[1], 'rb') as fh:
4750 with open(line.split(b' ', 1)[1], 'rb') as fh:
4745 body = fh.read()
4751 body = fh.read()
4746 elif line.startswith(b'frame '):
4752 elif line.startswith(b'frame '):
4747 frame = wireprotoframing.makeframefromhumanstring(
4753 frame = wireprotoframing.makeframefromhumanstring(
4748 line[len(b'frame ') :]
4754 line[len(b'frame ') :]
4749 )
4755 )
4750
4756
4751 frames.append(frame)
4757 frames.append(frame)
4752 else:
4758 else:
4753 raise error.Abort(
4759 raise error.Abort(
4754 _(b'unknown argument to httprequest: %s') % line
4760 _(b'unknown argument to httprequest: %s') % line
4755 )
4761 )
4756
4762
4757 url = path + httppath
4763 url = path + httppath
4758
4764
4759 if frames:
4765 if frames:
4760 body = b''.join(bytes(f) for f in frames)
4766 body = b''.join(bytes(f) for f in frames)
4761
4767
4762 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4768 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4763
4769
4764 # urllib.Request insists on using has_data() as a proxy for
4770 # urllib.Request insists on using has_data() as a proxy for
4765 # determining the request method. Override that to use our
4771 # determining the request method. Override that to use our
4766 # explicitly requested method.
4772 # explicitly requested method.
4767 req.get_method = lambda: pycompat.sysstr(method)
4773 req.get_method = lambda: pycompat.sysstr(method)
4768
4774
4769 try:
4775 try:
4770 res = opener.open(req)
4776 res = opener.open(req)
4771 body = res.read()
4777 body = res.read()
4772 except util.urlerr.urlerror as e:
4778 except util.urlerr.urlerror as e:
4773 # read() method must be called, but only exists in Python 2
4779 # read() method must be called, but only exists in Python 2
4774 getattr(e, 'read', lambda: None)()
4780 getattr(e, 'read', lambda: None)()
4775 continue
4781 continue
4776
4782
4777 ct = res.headers.get('Content-Type')
4783 ct = res.headers.get('Content-Type')
4778 if ct == 'application/mercurial-cbor':
4784 if ct == 'application/mercurial-cbor':
4779 ui.write(
4785 ui.write(
4780 _(b'cbor> %s\n')
4786 _(b'cbor> %s\n')
4781 % stringutil.pprint(
4787 % stringutil.pprint(
4782 cborutil.decodeall(body), bprefix=True, indent=2
4788 cborutil.decodeall(body), bprefix=True, indent=2
4783 )
4789 )
4784 )
4790 )
4785
4791
4786 elif action == b'close':
4792 elif action == b'close':
4787 assert peer is not None
4793 assert peer is not None
4788 peer.close()
4794 peer.close()
4789 elif action == b'readavailable':
4795 elif action == b'readavailable':
4790 if not stdout or not stderr:
4796 if not stdout or not stderr:
4791 raise error.Abort(
4797 raise error.Abort(
4792 _(b'readavailable not available on this peer')
4798 _(b'readavailable not available on this peer')
4793 )
4799 )
4794
4800
4795 stdin.close()
4801 stdin.close()
4796 stdout.read()
4802 stdout.read()
4797 stderr.read()
4803 stderr.read()
4798
4804
4799 elif action == b'readline':
4805 elif action == b'readline':
4800 if not stdout:
4806 if not stdout:
4801 raise error.Abort(_(b'readline not available on this peer'))
4807 raise error.Abort(_(b'readline not available on this peer'))
4802 stdout.readline()
4808 stdout.readline()
4803 elif action == b'ereadline':
4809 elif action == b'ereadline':
4804 if not stderr:
4810 if not stderr:
4805 raise error.Abort(_(b'ereadline not available on this peer'))
4811 raise error.Abort(_(b'ereadline not available on this peer'))
4806 stderr.readline()
4812 stderr.readline()
4807 elif action.startswith(b'read '):
4813 elif action.startswith(b'read '):
4808 count = int(action.split(b' ', 1)[1])
4814 count = int(action.split(b' ', 1)[1])
4809 if not stdout:
4815 if not stdout:
4810 raise error.Abort(_(b'read not available on this peer'))
4816 raise error.Abort(_(b'read not available on this peer'))
4811 stdout.read(count)
4817 stdout.read(count)
4812 elif action.startswith(b'eread '):
4818 elif action.startswith(b'eread '):
4813 count = int(action.split(b' ', 1)[1])
4819 count = int(action.split(b' ', 1)[1])
4814 if not stderr:
4820 if not stderr:
4815 raise error.Abort(_(b'eread not available on this peer'))
4821 raise error.Abort(_(b'eread not available on this peer'))
4816 stderr.read(count)
4822 stderr.read(count)
4817 else:
4823 else:
4818 raise error.Abort(_(b'unknown action: %s') % action)
4824 raise error.Abort(_(b'unknown action: %s') % action)
4819
4825
4820 if batchedcommands is not None:
4826 if batchedcommands is not None:
4821 raise error.Abort(_(b'unclosed "batchbegin" request'))
4827 raise error.Abort(_(b'unclosed "batchbegin" request'))
4822
4828
4823 if peer:
4829 if peer:
4824 peer.close()
4830 peer.close()
4825
4831
4826 if proc:
4832 if proc:
4827 proc.kill()
4833 proc.kill()