##// END OF EJS Templates
debugcommands: add a `--paranoid` option to `debug-repair-issue-6528`...
Raphaël Gomès -
r48625:855463b5 stable
parent child Browse files
Show More
@@ -1,4915 +1,4932
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 wireprotov2peer,
94 wireprotov2peer,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
# Convenience alias: debug commands release lock bundles through this.
release = lockmod.release

# Registration table for all debug* commands in this module.  It is seeded
# with the strip extension's command table so those commands are registered
# alongside the debug commands defined below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly from the
        # current directory, without requiring a repository.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # No index argument: fall back to the changelog of the local repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
139
139
140
140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Mercurial vfs paths are bytes; the original passed a native ``str``
    # literal here, which breaks on Python 3 when joined with the bytes
    # base path of the cache vfs (os.path.join refuses mixed str/bytes).
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
156
156
157
157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle (handles local paths and URLs alike), parse its
    # header, then replay the stream into the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164
164
165
165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense in a repository with no revisions yet.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the parsed DAG: count the nodes so the progress bar
    # (and the mergeable-file sizing below) know the total up front.
    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits, under locks and a single
    # transaction so a failure leaves no partial state behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node (-1 = none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # node hashes indexed by DAG id, for backref resolution
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' events carry (id, parent-ids); build one commit.
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file contents of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # Very first node: start from the pre-sized content.
                        ml = initialmergedlines
                    # Tag this revision's line so each rev changes the file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file rewritten wholesale at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # A fresh file per revision; merges also carry over the
                    # other parent's "nf*" files so they are not lost.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from filecontent.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs into actual parent node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' events define a local tag on an already-built node.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' events switch the named branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags are written outside the transaction, directly to .hg.
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
341
341
342
342
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup ``gen`` to the ui.

    With ``all`` set, every delta of the changelog, manifest and each
    filelog is printed in full (node, parents, cset, delta base, delta
    length); otherwise only the changelog node hashes are listed.
    ``indent`` prefixes every output line, so this can be nested inside
    bundle2 part output.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print a section header, then one line per delta in the
            # current changegroup section.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # The stream must be consumed in order: changelog, manifest, then
        # one section per filelog until an empty header terminates it.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # Terse mode: only list changelog node hashes.
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
382
382
383
383
def _debugobsmarkers(ui, part, indent=0, **opts):
    """Display the obsolescence-marker version and markers in bundle2 ``part``.

    Unsupported marker versions are reported instead of raising; supported
    ones are decoded and rendered one marker per line through a formatter,
    each line prefixed by ``indent`` spaces.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Report unknown encodings rather than aborting the bundle dump.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
406
406
407
407
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary phase-heads ``data``"""
    prefix = b' ' * indent
    # Decode once; the result maps each phase to its list of head nodes.
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(b'%s%s %s\n' % (prefix, hex(head), phasename))
416
416
417
417
def _quasirepr(thing):
    """Return a deterministic, repr-like bytestring for ``thing``.

    Mapping types are rendered with their keys sorted so the output is
    stable across runs; everything else falls back to ``repr``.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
424
424
425
425
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested via
    # --part-type (may be given multiple times).
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For well-known part types, recurse into their payload with an
        # extra indent; suppressed in quiet mode.
        if part.type == b'changegroup':
            # Default to cg1 ('01') when the part omits the version param.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
448
448
449
449
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec string and stop.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # Dispatch on bundle format: bundle2 gets the part-aware dumper,
        # everything else is treated as a bare changegroup.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
472
472
473
473
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b' %s\n' % c)
        # Bundle2 capabilities are advertised separately, as a mapping of
        # capability name to its list of supported values.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for v in values:
                    ui.write(b' %s\n' % v)
    finally:
        # Always close the peer connection, even if listing failed.
        peer.close()
493
493
494
494
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # --compute: recompute the file-change metadata from the revision
        # itself instead of trusting what is stored.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the pre-computed file changes from changelog sidedata, if
        # this repository stores them.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; the order of checks defines
            # precedence when a file appears in several categories.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Record copy tracing information, if any: which parent the
            # file was copied from and the source path in that parent.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
544
544
545
545
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    problems = 0
    # Cross-check every tracked file's dirstate status against the parent
    # manifests.
    for path in repo.dirstate:
        entry = repo.dirstate[path]
        if entry in b"nr" and path not in manifest1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n") % (path, entry)
            )
            problems += 1
        if entry in b"a" and path in manifest1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n") % (path, entry)
            )
            problems += 1
        if entry in b"m" and path not in manifest1 and path not in manifest2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (path, entry)
            )
            problems += 1
    # Reverse check: everything in the first parent manifest must have a
    # sensible dirstate status.
    for path in manifest1:
        entry = repo.dirstate[path]
        if entry not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s") % (path, entry)
            )
            problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
574
574
575
575
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; the default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
588
588
589
589
def _debugdisplaycolor(ui):
    """Print every available color/effect name, rendered in itself."""
    # Work on a copy so the caller's ui styles are not clobbered.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, custom colors/effects come from the [color]
        # config section; strip the 'color.'/'terminfo.' prefixes.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        # Label each line with itself so the color is shown in effect.
        ui.write(b'%s\n' % colorname, label=label)
606
606
607
607
def _debugdisplaystyle(ui):
    """Print each configured style label and the effects it expands to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effect columns line up
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
621
621
622
622
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
644
644
645
645
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: emit its DAG, labeling listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
715
715
716
716
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
732
732
733
733
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
752
752
753
753
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # classify the delta of `rev` and measure its whole chain
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # delta base may be any revision; name it relative to parents
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either itself or the
            # previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
934
934
935
935
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    # --nodates is the deprecated spelling of --no-dates
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (
            x[1].v1_mtime(),
            x[0],
        )  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    if opts['all']:
        entries = list(repo.dirstate._map.debug_iter())
    else:
        entries = list(pycompat.iteritems(repo.dirstate))
    entries.sort(key=keyfunc)
    for file_, ent in entries:
        if ent.v1_mtime() == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
            )
            timestr = encoding.strtolocal(timestr)
        if ent.mode & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n"
            % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
        )
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
996
996
997
997
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # only dirstate-v2 dockets carry the ignore-pattern hash; for v1 this
    # command prints nothing at all
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1012
1012
1013
1013
1014 @command(
1014 @command(
1015 b'debugdiscovery',
1015 b'debugdiscovery',
1016 [
1016 [
1017 (b'', b'old', None, _(b'use old-style discovery')),
1017 (b'', b'old', None, _(b'use old-style discovery')),
1018 (
1018 (
1019 b'',
1019 b'',
1020 b'nonheads',
1020 b'nonheads',
1021 None,
1021 None,
1022 _(b'use old-style discovery with non-heads included'),
1022 _(b'use old-style discovery with non-heads included'),
1023 ),
1023 ),
1024 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1024 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1025 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1025 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1026 (
1026 (
1027 b'',
1027 b'',
1028 b'local-as-revs',
1028 b'local-as-revs',
1029 b"",
1029 b"",
1030 b'treat local has having these revisions only',
1030 b'treat local has having these revisions only',
1031 ),
1031 ),
1032 (
1032 (
1033 b'',
1033 b'',
1034 b'remote-as-revs',
1034 b'remote-as-revs',
1035 b"",
1035 b"",
1036 b'use local as remote, with only these these revisions',
1036 b'use local as remote, with only these these revisions',
1037 ),
1037 ),
1038 ]
1038 ]
1039 + cmdutil.remoteopts
1039 + cmdutil.remoteopts
1040 + cmdutil.formatteropts,
1040 + cmdutil.formatteropts,
1041 _(b'[--rev REV] [OTHER]'),
1041 _(b'[--rev REV] [OTHER]'),
1042 )
1042 )
1043 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1043 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1044 """runs the changeset discovery protocol in isolation
1044 """runs the changeset discovery protocol in isolation
1045
1045
1046 The local peer can be "replaced" by a subset of the local repository by
1046 The local peer can be "replaced" by a subset of the local repository by
1047 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1047 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1048 be "replaced" by a subset of the local repository using the
1048 be "replaced" by a subset of the local repository using the
1049 `--local-as-revs` flag. This is useful to efficiently debug pathological
1049 `--local-as-revs` flag. This is useful to efficiently debug pathological
1050 discovery situation.
1050 discovery situation.
1051
1051
1052 The following developer oriented config are relevant for people playing with this command:
1052 The following developer oriented config are relevant for people playing with this command:
1053
1053
1054 * devel.discovery.exchange-heads=True
1054 * devel.discovery.exchange-heads=True
1055
1055
1056 If False, the discovery will not start with
1056 If False, the discovery will not start with
1057 remote head fetching and local head querying.
1057 remote head fetching and local head querying.
1058
1058
1059 * devel.discovery.grow-sample=True
1059 * devel.discovery.grow-sample=True
1060
1060
1061 If False, the sample size used in set discovery will not be increased
1061 If False, the sample size used in set discovery will not be increased
1062 through the process
1062 through the process
1063
1063
1064 * devel.discovery.grow-sample.dynamic=True
1064 * devel.discovery.grow-sample.dynamic=True
1065
1065
1066 When discovery.grow-sample.dynamic is True, the default, the sample size is
1066 When discovery.grow-sample.dynamic is True, the default, the sample size is
1067 adapted to the shape of the undecided set (it is set to the max of:
1067 adapted to the shape of the undecided set (it is set to the max of:
1068 <target-size>, len(roots(undecided)), len(heads(undecided)
1068 <target-size>, len(roots(undecided)), len(heads(undecided)
1069
1069
1070 * devel.discovery.grow-sample.rate=1.05
1070 * devel.discovery.grow-sample.rate=1.05
1071
1071
1072 the rate at which the sample grow
1072 the rate at which the sample grow
1073
1073
1074 * devel.discovery.randomize=True
1074 * devel.discovery.randomize=True
1075
1075
1076 If andom sampling during discovery are deterministic. It is meant for
1076 If andom sampling during discovery are deterministic. It is meant for
1077 integration tests.
1077 integration tests.
1078
1078
1079 * devel.discovery.sample-size=200
1079 * devel.discovery.sample-size=200
1080
1080
1081 Control the initial size of the discovery sample
1081 Control the initial size of the discovery sample
1082
1082
1083 * devel.discovery.sample-size.initial=100
1083 * devel.discovery.sample-size.initial=100
1084
1084
1085 Control the initial size of the discovery for initial change
1085 Control the initial size of the discovery for initial change
1086 """
1086 """
1087 opts = pycompat.byteskwargs(opts)
1087 opts = pycompat.byteskwargs(opts)
1088 unfi = repo.unfiltered()
1088 unfi = repo.unfiltered()
1089
1089
1090 # setup potential extra filtering
1090 # setup potential extra filtering
1091 local_revs = opts[b"local_as_revs"]
1091 local_revs = opts[b"local_as_revs"]
1092 remote_revs = opts[b"remote_as_revs"]
1092 remote_revs = opts[b"remote_as_revs"]
1093
1093
1094 # make sure tests are repeatable
1094 # make sure tests are repeatable
1095 random.seed(int(opts[b'seed']))
1095 random.seed(int(opts[b'seed']))
1096
1096
1097 if not remote_revs:
1097 if not remote_revs:
1098
1098
1099 remoteurl, branches = urlutil.get_unique_pull_path(
1099 remoteurl, branches = urlutil.get_unique_pull_path(
1100 b'debugdiscovery', repo, ui, remoteurl
1100 b'debugdiscovery', repo, ui, remoteurl
1101 )
1101 )
1102 remote = hg.peer(repo, opts, remoteurl)
1102 remote = hg.peer(repo, opts, remoteurl)
1103 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1103 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1104 else:
1104 else:
1105 branches = (None, [])
1105 branches = (None, [])
1106 remote_filtered_revs = scmutil.revrange(
1106 remote_filtered_revs = scmutil.revrange(
1107 unfi, [b"not (::(%s))" % remote_revs]
1107 unfi, [b"not (::(%s))" % remote_revs]
1108 )
1108 )
1109 remote_filtered_revs = frozenset(remote_filtered_revs)
1109 remote_filtered_revs = frozenset(remote_filtered_revs)
1110
1110
1111 def remote_func(x):
1111 def remote_func(x):
1112 return remote_filtered_revs
1112 return remote_filtered_revs
1113
1113
1114 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1114 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1115
1115
1116 remote = repo.peer()
1116 remote = repo.peer()
1117 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1117 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1118
1118
1119 if local_revs:
1119 if local_revs:
1120 local_filtered_revs = scmutil.revrange(
1120 local_filtered_revs = scmutil.revrange(
1121 unfi, [b"not (::(%s))" % local_revs]
1121 unfi, [b"not (::(%s))" % local_revs]
1122 )
1122 )
1123 local_filtered_revs = frozenset(local_filtered_revs)
1123 local_filtered_revs = frozenset(local_filtered_revs)
1124
1124
1125 def local_func(x):
1125 def local_func(x):
1126 return local_filtered_revs
1126 return local_filtered_revs
1127
1127
1128 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1128 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1129 repo = repo.filtered(b'debug-discovery-local-filter')
1129 repo = repo.filtered(b'debug-discovery-local-filter')
1130
1130
1131 data = {}
1131 data = {}
1132 if opts.get(b'old'):
1132 if opts.get(b'old'):
1133
1133
1134 def doit(pushedrevs, remoteheads, remote=remote):
1134 def doit(pushedrevs, remoteheads, remote=remote):
1135 if not util.safehasattr(remote, b'branches'):
1135 if not util.safehasattr(remote, b'branches'):
1136 # enable in-client legacy support
1136 # enable in-client legacy support
1137 remote = localrepo.locallegacypeer(remote.local())
1137 remote = localrepo.locallegacypeer(remote.local())
1138 common, _in, hds = treediscovery.findcommonincoming(
1138 common, _in, hds = treediscovery.findcommonincoming(
1139 repo, remote, force=True, audit=data
1139 repo, remote, force=True, audit=data
1140 )
1140 )
1141 common = set(common)
1141 common = set(common)
1142 if not opts.get(b'nonheads'):
1142 if not opts.get(b'nonheads'):
1143 ui.writenoi18n(
1143 ui.writenoi18n(
1144 b"unpruned common: %s\n"
1144 b"unpruned common: %s\n"
1145 % b" ".join(sorted(short(n) for n in common))
1145 % b" ".join(sorted(short(n) for n in common))
1146 )
1146 )
1147
1147
1148 clnode = repo.changelog.node
1148 clnode = repo.changelog.node
1149 common = repo.revs(b'heads(::%ln)', common)
1149 common = repo.revs(b'heads(::%ln)', common)
1150 common = {clnode(r) for r in common}
1150 common = {clnode(r) for r in common}
1151 return common, hds
1151 return common, hds
1152
1152
1153 else:
1153 else:
1154
1154
1155 def doit(pushedrevs, remoteheads, remote=remote):
1155 def doit(pushedrevs, remoteheads, remote=remote):
1156 nodes = None
1156 nodes = None
1157 if pushedrevs:
1157 if pushedrevs:
1158 revs = scmutil.revrange(repo, pushedrevs)
1158 revs = scmutil.revrange(repo, pushedrevs)
1159 nodes = [repo[r].node() for r in revs]
1159 nodes = [repo[r].node() for r in revs]
1160 common, any, hds = setdiscovery.findcommonheads(
1160 common, any, hds = setdiscovery.findcommonheads(
1161 ui, repo, remote, ancestorsof=nodes, audit=data
1161 ui, repo, remote, ancestorsof=nodes, audit=data
1162 )
1162 )
1163 return common, hds
1163 return common, hds
1164
1164
1165 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1165 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1166 localrevs = opts[b'rev']
1166 localrevs = opts[b'rev']
1167
1167
1168 fm = ui.formatter(b'debugdiscovery', opts)
1168 fm = ui.formatter(b'debugdiscovery', opts)
1169 if fm.strict_format:
1169 if fm.strict_format:
1170
1170
1171 @contextlib.contextmanager
1171 @contextlib.contextmanager
1172 def may_capture_output():
1172 def may_capture_output():
1173 ui.pushbuffer()
1173 ui.pushbuffer()
1174 yield
1174 yield
1175 data[b'output'] = ui.popbuffer()
1175 data[b'output'] = ui.popbuffer()
1176
1176
1177 else:
1177 else:
1178 may_capture_output = util.nullcontextmanager
1178 may_capture_output = util.nullcontextmanager
1179 with may_capture_output():
1179 with may_capture_output():
1180 with util.timedcm('debug-discovery') as t:
1180 with util.timedcm('debug-discovery') as t:
1181 common, hds = doit(localrevs, remoterevs)
1181 common, hds = doit(localrevs, remoterevs)
1182
1182
1183 # compute all statistics
1183 # compute all statistics
1184 heads_common = set(common)
1184 heads_common = set(common)
1185 heads_remote = set(hds)
1185 heads_remote = set(hds)
1186 heads_local = set(repo.heads())
1186 heads_local = set(repo.heads())
1187 # note: they cannot be a local or remote head that is in common and not
1187 # note: they cannot be a local or remote head that is in common and not
1188 # itself a head of common.
1188 # itself a head of common.
1189 heads_common_local = heads_common & heads_local
1189 heads_common_local = heads_common & heads_local
1190 heads_common_remote = heads_common & heads_remote
1190 heads_common_remote = heads_common & heads_remote
1191 heads_common_both = heads_common & heads_remote & heads_local
1191 heads_common_both = heads_common & heads_remote & heads_local
1192
1192
1193 all = repo.revs(b'all()')
1193 all = repo.revs(b'all()')
1194 common = repo.revs(b'::%ln', common)
1194 common = repo.revs(b'::%ln', common)
1195 roots_common = repo.revs(b'roots(::%ld)', common)
1195 roots_common = repo.revs(b'roots(::%ld)', common)
1196 missing = repo.revs(b'not ::%ld', common)
1196 missing = repo.revs(b'not ::%ld', common)
1197 heads_missing = repo.revs(b'heads(%ld)', missing)
1197 heads_missing = repo.revs(b'heads(%ld)', missing)
1198 roots_missing = repo.revs(b'roots(%ld)', missing)
1198 roots_missing = repo.revs(b'roots(%ld)', missing)
1199 assert len(common) + len(missing) == len(all)
1199 assert len(common) + len(missing) == len(all)
1200
1200
1201 initial_undecided = repo.revs(
1201 initial_undecided = repo.revs(
1202 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1202 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1203 )
1203 )
1204 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1204 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1205 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1205 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1206 common_initial_undecided = initial_undecided & common
1206 common_initial_undecided = initial_undecided & common
1207 missing_initial_undecided = initial_undecided & missing
1207 missing_initial_undecided = initial_undecided & missing
1208
1208
1209 data[b'elapsed'] = t.elapsed
1209 data[b'elapsed'] = t.elapsed
1210 data[b'nb-common-heads'] = len(heads_common)
1210 data[b'nb-common-heads'] = len(heads_common)
1211 data[b'nb-common-heads-local'] = len(heads_common_local)
1211 data[b'nb-common-heads-local'] = len(heads_common_local)
1212 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1212 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1213 data[b'nb-common-heads-both'] = len(heads_common_both)
1213 data[b'nb-common-heads-both'] = len(heads_common_both)
1214 data[b'nb-common-roots'] = len(roots_common)
1214 data[b'nb-common-roots'] = len(roots_common)
1215 data[b'nb-head-local'] = len(heads_local)
1215 data[b'nb-head-local'] = len(heads_local)
1216 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1216 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1217 data[b'nb-head-remote'] = len(heads_remote)
1217 data[b'nb-head-remote'] = len(heads_remote)
1218 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1218 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1219 heads_common_remote
1219 heads_common_remote
1220 )
1220 )
1221 data[b'nb-revs'] = len(all)
1221 data[b'nb-revs'] = len(all)
1222 data[b'nb-revs-common'] = len(common)
1222 data[b'nb-revs-common'] = len(common)
1223 data[b'nb-revs-missing'] = len(missing)
1223 data[b'nb-revs-missing'] = len(missing)
1224 data[b'nb-missing-heads'] = len(heads_missing)
1224 data[b'nb-missing-heads'] = len(heads_missing)
1225 data[b'nb-missing-roots'] = len(roots_missing)
1225 data[b'nb-missing-roots'] = len(roots_missing)
1226 data[b'nb-ini_und'] = len(initial_undecided)
1226 data[b'nb-ini_und'] = len(initial_undecided)
1227 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1227 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1228 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1228 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1229 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1229 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1230 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1230 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1231
1231
1232 fm.startitem()
1232 fm.startitem()
1233 fm.data(**pycompat.strkwargs(data))
1233 fm.data(**pycompat.strkwargs(data))
1234 # display discovery summary
1234 # display discovery summary
1235 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1235 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1236 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1236 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1237 fm.plain(b"heads summary:\n")
1237 fm.plain(b"heads summary:\n")
1238 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1239 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1239 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1240 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1240 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1241 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1241 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1242 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1242 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1243 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1243 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1244 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1244 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1245 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1245 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1246 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1246 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1247 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1247 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1248 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1248 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1249 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1249 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1250 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1250 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1251 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1251 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1252 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1252 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1253 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1253 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1254 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1254 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1255 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1255 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1256 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1256 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1257 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1257 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1258 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1258 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1259 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1259 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1260
1260
1261 if ui.verbose:
1261 if ui.verbose:
1262 fm.plain(
1262 fm.plain(
1263 b"common heads: %s\n"
1263 b"common heads: %s\n"
1264 % b" ".join(sorted(short(n) for n in heads_common))
1264 % b" ".join(sorted(short(n) for n in heads_common))
1265 )
1265 )
1266 fm.end()
1266 fm.end()
1267
1267
1268
1268
1269 _chunksize = 4 << 10
1269 _chunksize = 4 << 10
1270
1270
1271
1271
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource named by ``url`` is opened through Mercurial's url layer
    (``urlmod.open``) so the usual configuration applies, then streamed in
    fixed-size chunks either to the ui or, when ``--output`` is given, to
    the named file.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # Stream in _chunksize pieces to bound memory usage for large
        # downloads instead of reading the whole resource at once.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # Close the source handle too; the original leaked it on every call.
        # (urlmod.open returns a file-like object — assumed to have close();
        # TODO confirm against urlmod.)
        fh.close()
        if output:
            dest.close()
1294
1294
1295
1295
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    """show information about active extensions

    For each loaded extension, prints its name and — depending on verbosity —
    its source location, whether it is bundled with Mercurial, which versions
    it was tested with, and its bug-reporting link.
    """
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: extensions live inside the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1357
1357
1358
1358
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    """parse and apply a fileset specification

    Parses ``expr`` through the parsed/analyzed/optimized stages (optionally
    printing the tree after any of them via ``--show-stage``), builds the
    matcher for the selected revision, and prints every known file the
    matcher accepts.
    """
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # Pipeline of tree transformations; each stage name can be requested
    # with --show-stage to dump the intermediate representation.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names to test the matcher against.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1454
1454
1455
1455
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that hasn't been written yet
    (as of 5.9rc0).

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) is exclusive with both consuming one
    # and with dry-run mode.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1511
1528
1512
1529
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest variant name, but never narrower than the header.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name column so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values are printed as-is; booleans become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured output (json/template) keeps raw values
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so the UI can highlight repo/config/default mismatches.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1583
1600
1584
1601
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem

    Prints mount point, fs type, and whether the filesystem at ``path``
    supports exec bits, symlinks, hardlinks, and case-sensitive names.
    """
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
    )
    casesensitive = b'(unknown)'
    try:
        # Probe case sensitivity with a scratch file inside the target path;
        # an unwritable path just leaves the answer as '(unknown)'.
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1607
1624
1608
1625
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name to the on-the-wire bundle type.
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1655
1672
1656
1673
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No arguments: dump the combined ignore matcher itself.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise it may be ignored through a parent directory.
                for parent in pathutil.finddirs(nf):
                    if ignore(parent):
                        ignored = parent
                        ignoredata = repo.dirstate._ignorefileandline(parent)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1705
1722
1706
1723
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full hashes in debug mode, short form otherwise.
    shortfn = hex if ui.debugflag else short

    # Derive the node-id column width from the first revision, if any.
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1746
1763
1747
1764
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)
        # One edge per real parent; the null second parent is omitted.
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1766
1783
1767
1784
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index so the (possibly lazy) implementation is loaded.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1777
1794
1778
1795
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of checks (encoding, Python install, compiled
    extensions, compression engines, templates, editor, username) and
    writes one line per check through a formatter.  Each failed check
    increments a problem counter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Number of failed checks; doubles as the command's return code.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library; under an "oxidized" (embedded)
    # interpreter there is no os.__file__, so fall back to the executable.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # Probe for the optional Rust extension module.
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same fallback as for pythonlib above: oxidized builds lack __file__.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # Decide from the module policy which compiled extensions must import.
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused as "templates are OK" flag below.
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # A missing 'vi' default is reported but not counted as a problem;
    # a missing explicitly-configured editor is.
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Let loaded extensions contribute their own install checks.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2077
2094
2078
2095
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(nodeid) for nodeid in ids])
    # Render each answer as '1' (known) or '0' (unknown), concatenated.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2092
2109
2093
2110
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # Kept only so ancient completion scripts keep working; simply delegate
    # to the modern implementation.
    debugnamecomplete(ui, repo, *args)
2098
2115
2099
2116
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock file(s) and stop.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire (non-blocking) and hold until the
    # user answers the prompt; the finally clause guarantees release.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Default mode: report which locks are currently held.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else held it; release immediately.
            l.release()
        else:
            # Lock is held by someone: describe owner, process and age
            # from the lock file's contents and mtime.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between probe and stat:
                # treat it as free; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2211
2228
2212
2229
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache lives on the manifest revlog; older/alternate
        # revlog implementations may not expose one at all
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: drop both the in-memory and the persisted cache data
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    # --add NODE...: populate the cache by reading the given manifests
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # no action flag: display the cache contents, most recent first
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2286
2303
2287
2304
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # peek at the raw on-disk records to report which format was used
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable template; the escaped quotes are part of
        # the template language, not Python string syntax
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # one entry per file recorded in the merge state
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # the record layout depends on the record type: content merges
            # carry full three-way information, path conflicts only carry
            # the rename details
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras attached to files that have no merge-state entry of their own
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2395
2412
2396
2413
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branch names are gathered separately below so that only open
    # branches are offered, matching the historical behavior
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # no argument means "complete everything"
    prefixes = args or [b'']
    matches = {
        candidate
        for prefix in prefixes
        for candidate in candidates
        if candidate.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2419
2436
2420
2437
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        changelog = repo.unfiltered().changelog
        # prefer the index's native serializer when the implementation
        # provides one, otherwise serialize from Python
        if util.safehasattr(changelog.index, "nodemap_data_all"):
            blob = changelog.index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(changelog.index)
        ui.write(blob)
    elif opts['dump_disk']:
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            ui.write(blob[:])
    elif opts['check']:
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            return nodemap.check_data(ui, changelog.index, blob)
    elif opts['metadata']:
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            for line in (
                b"uid: %s\n" % docket.uid,
                b"tip-rev: %d\n" % docket.tip_rev,
                b"tip-node: %s\n" % hex(docket.tip_node),
                b"data-length: %d\n" % docket.data_length,
                b"data-unused: %d\n" % docket.data_unused,
                b"data-unused: %2.3f%%\n"
                % (docket.data_unused * 100.0 / docket.data_length),
            ):
                ui.write(line)
2482
2499
2483
2500
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hex node id without requiring it to exist locally
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete INDEX...: remove markers by position and stop
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        # deletion rewrites the obsstore file; refuse to do that while a
        # transaction is open
        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record PRECURSOR -> SUCCESSORS...
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # lock is taken before the transaction and released after it
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for changesets we have
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # tr.release() aborts the transaction if close() was not
                # reached
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices must be computed over the full marker list, even
            # though only the --rev-relevant ones get displayed
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2633
2650
2634
2651
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # resolve the requested revision (working directory when no --rev)
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2647
2664
2648
2665
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # NOTE: this function was previously (mis)named debugp1copies, which
    # shadowed the real debugp1copies defined just above in the module
    # namespace.  Command dispatch goes through the byte name given to
    # @command, so the rename does not change any command behavior.
    opts = pycompat.byteskwargs(opts)
    # resolve the requested revision (working directory when no --rev)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2661
2678
2662
2679
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # returns (files, dirs): completions for `path` among dirstate
        # entries whose state letter is in `acceptable`
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # paths outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate stores paths with '/'; translate when the platform
        # separator differs (e.g. Windows)
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator after
                # the spec prefix
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate state letters from the flags;
    # no flag means everything (nmar)
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2731
2748
2732
2749
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # resolve both endpoints; the matcher is built against the source side
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dest in sorted(copymap):
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2746
2763
2747
2764
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # Probe the peer's basic capabilities before reporting.
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Always release the connection, even if probing failed.
        peer.close()
2771
2788
2772
2789
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # An explicit --tool overrides everything else via ui.forcemerge.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the higher-priority sources (env var, ui.merge) in
        # verbose mode so the user can see why a tool was picked.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's chatter unless --debug is active.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2857
2874
2858
2875
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: attempt the conditional update.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Exit status 0 on success (truthy result), 1 otherwise.
            return not r
        else:
            # Two-argument form: dump all keys in the namespace,
            # escaping values so binary data stays printable.
            for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2894
2911
2895
2912
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvecs) of two revisions

    Prints each revision's pvec, their depths, and the computed
    delta / hamming distance together with the inferred relation:
    '=' equal, '>' / '<' ancestor ordering, '|' divergent.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Initialize so `rel` is always bound: the original if/elif chain
    # had no else branch, so the final write below raised NameError
    # whenever none of the four comparisons held.
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2922
2939
2923
2940
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest that the dirstate doesn't know about.
            manifestonly = manifestfiles - dirstatefiles
            # Dirstate-only files, excluding ones marked as added.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2971
2988
2972
2989
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work lives in the repair module.
    repair.rebuildfncache(ui, repo)
2977
2994
2978
2995
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or a falsy
        # value when the file was not copied/renamed.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if not o:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2998
3015
2999
3016
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Sorted so output is stable across runs and platforms.
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3005
3022
3006
3023
3007 @command(
3024 @command(
3008 b'debugrevlog',
3025 b'debugrevlog',
3009 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3010 _(b'-c|-m|FILE'),
3027 _(b'-c|-m|FILE'),
3011 optionalrepo=True,
3028 optionalrepo=True,
3012 )
3029 )
3013 def debugrevlog(ui, repo, file_=None, **opts):
3030 def debugrevlog(ui, repo, file_=None, **opts):
3014 """show data and statistics about a revlog"""
3031 """show data and statistics about a revlog"""
3015 opts = pycompat.byteskwargs(opts)
3032 opts = pycompat.byteskwargs(opts)
3016 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3017
3034
3018 if opts.get(b"dump"):
3035 if opts.get(b"dump"):
3019 numrevs = len(r)
3036 numrevs = len(r)
3020 ui.write(
3037 ui.write(
3021 (
3038 (
3022 b"# rev p1rev p2rev start end deltastart base p1 p2"
3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3023 b" rawsize totalsize compression heads chainlen\n"
3040 b" rawsize totalsize compression heads chainlen\n"
3024 )
3041 )
3025 )
3042 )
3026 ts = 0
3043 ts = 0
3027 heads = set()
3044 heads = set()
3028
3045
3029 for rev in pycompat.xrange(numrevs):
3046 for rev in pycompat.xrange(numrevs):
3030 dbase = r.deltaparent(rev)
3047 dbase = r.deltaparent(rev)
3031 if dbase == -1:
3048 if dbase == -1:
3032 dbase = rev
3049 dbase = rev
3033 cbase = r.chainbase(rev)
3050 cbase = r.chainbase(rev)
3034 clen = r.chainlen(rev)
3051 clen = r.chainlen(rev)
3035 p1, p2 = r.parentrevs(rev)
3052 p1, p2 = r.parentrevs(rev)
3036 rs = r.rawsize(rev)
3053 rs = r.rawsize(rev)
3037 ts = ts + rs
3054 ts = ts + rs
3038 heads -= set(r.parentrevs(rev))
3055 heads -= set(r.parentrevs(rev))
3039 heads.add(rev)
3056 heads.add(rev)
3040 try:
3057 try:
3041 compression = ts / r.end(rev)
3058 compression = ts / r.end(rev)
3042 except ZeroDivisionError:
3059 except ZeroDivisionError:
3043 compression = 0
3060 compression = 0
3044 ui.write(
3061 ui.write(
3045 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3046 b"%11d %5d %8d\n"
3063 b"%11d %5d %8d\n"
3047 % (
3064 % (
3048 rev,
3065 rev,
3049 p1,
3066 p1,
3050 p2,
3067 p2,
3051 r.start(rev),
3068 r.start(rev),
3052 r.end(rev),
3069 r.end(rev),
3053 r.start(dbase),
3070 r.start(dbase),
3054 r.start(cbase),
3071 r.start(cbase),
3055 r.start(p1),
3072 r.start(p1),
3056 r.start(p2),
3073 r.start(p2),
3057 rs,
3074 rs,
3058 ts,
3075 ts,
3059 compression,
3076 compression,
3060 len(heads),
3077 len(heads),
3061 clen,
3078 clen,
3062 )
3079 )
3063 )
3080 )
3064 return 0
3081 return 0
3065
3082
3066 format = r._format_version
3083 format = r._format_version
3067 v = r._format_flags
3084 v = r._format_flags
3068 flags = []
3085 flags = []
3069 gdelta = False
3086 gdelta = False
3070 if v & revlog.FLAG_INLINE_DATA:
3087 if v & revlog.FLAG_INLINE_DATA:
3071 flags.append(b'inline')
3088 flags.append(b'inline')
3072 if v & revlog.FLAG_GENERALDELTA:
3089 if v & revlog.FLAG_GENERALDELTA:
3073 gdelta = True
3090 gdelta = True
3074 flags.append(b'generaldelta')
3091 flags.append(b'generaldelta')
3075 if not flags:
3092 if not flags:
3076 flags = [b'(none)']
3093 flags = [b'(none)']
3077
3094
3078 ### tracks merge vs single parent
3095 ### tracks merge vs single parent
3079 nummerges = 0
3096 nummerges = 0
3080
3097
3081 ### tracks ways the "delta" are build
3098 ### tracks ways the "delta" are build
3082 # nodelta
3099 # nodelta
3083 numempty = 0
3100 numempty = 0
3084 numemptytext = 0
3101 numemptytext = 0
3085 numemptydelta = 0
3102 numemptydelta = 0
3086 # full file content
3103 # full file content
3087 numfull = 0
3104 numfull = 0
3088 # intermediate snapshot against a prior snapshot
3105 # intermediate snapshot against a prior snapshot
3089 numsemi = 0
3106 numsemi = 0
3090 # snapshot count per depth
3107 # snapshot count per depth
3091 numsnapdepth = collections.defaultdict(lambda: 0)
3108 numsnapdepth = collections.defaultdict(lambda: 0)
3092 # delta against previous revision
3109 # delta against previous revision
3093 numprev = 0
3110 numprev = 0
3094 # delta against first or second parent (not prev)
3111 # delta against first or second parent (not prev)
3095 nump1 = 0
3112 nump1 = 0
3096 nump2 = 0
3113 nump2 = 0
3097 # delta against neither prev nor parents
3114 # delta against neither prev nor parents
3098 numother = 0
3115 numother = 0
3099 # delta against prev that are also first or second parent
3116 # delta against prev that are also first or second parent
3100 # (details of `numprev`)
3117 # (details of `numprev`)
3101 nump1prev = 0
3118 nump1prev = 0
3102 nump2prev = 0
3119 nump2prev = 0
3103
3120
3104 # data about delta chain of each revs
3121 # data about delta chain of each revs
3105 chainlengths = []
3122 chainlengths = []
3106 chainbases = []
3123 chainbases = []
3107 chainspans = []
3124 chainspans = []
3108
3125
3109 # data about each revision
3126 # data about each revision
3110 datasize = [None, 0, 0]
3127 datasize = [None, 0, 0]
3111 fullsize = [None, 0, 0]
3128 fullsize = [None, 0, 0]
3112 semisize = [None, 0, 0]
3129 semisize = [None, 0, 0]
3113 # snapshot count per depth
3130 # snapshot count per depth
3114 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3115 deltasize = [None, 0, 0]
3132 deltasize = [None, 0, 0]
3116 chunktypecounts = {}
3133 chunktypecounts = {}
3117 chunktypesizes = {}
3134 chunktypesizes = {}
3118
3135
3119 def addsize(size, l):
3136 def addsize(size, l):
3120 if l[0] is None or size < l[0]:
3137 if l[0] is None or size < l[0]:
3121 l[0] = size
3138 l[0] = size
3122 if size > l[1]:
3139 if size > l[1]:
3123 l[1] = size
3140 l[1] = size
3124 l[2] += size
3141 l[2] += size
3125
3142
3126 numrevs = len(r)
3143 numrevs = len(r)
3127 for rev in pycompat.xrange(numrevs):
3144 for rev in pycompat.xrange(numrevs):
3128 p1, p2 = r.parentrevs(rev)
3145 p1, p2 = r.parentrevs(rev)
3129 delta = r.deltaparent(rev)
3146 delta = r.deltaparent(rev)
3130 if format > 0:
3147 if format > 0:
3131 addsize(r.rawsize(rev), datasize)
3148 addsize(r.rawsize(rev), datasize)
3132 if p2 != nullrev:
3149 if p2 != nullrev:
3133 nummerges += 1
3150 nummerges += 1
3134 size = r.length(rev)
3151 size = r.length(rev)
3135 if delta == nullrev:
3152 if delta == nullrev:
3136 chainlengths.append(0)
3153 chainlengths.append(0)
3137 chainbases.append(r.start(rev))
3154 chainbases.append(r.start(rev))
3138 chainspans.append(size)
3155 chainspans.append(size)
3139 if size == 0:
3156 if size == 0:
3140 numempty += 1
3157 numempty += 1
3141 numemptytext += 1
3158 numemptytext += 1
3142 else:
3159 else:
3143 numfull += 1
3160 numfull += 1
3144 numsnapdepth[0] += 1
3161 numsnapdepth[0] += 1
3145 addsize(size, fullsize)
3162 addsize(size, fullsize)
3146 addsize(size, snapsizedepth[0])
3163 addsize(size, snapsizedepth[0])
3147 else:
3164 else:
3148 chainlengths.append(chainlengths[delta] + 1)
3165 chainlengths.append(chainlengths[delta] + 1)
3149 baseaddr = chainbases[delta]
3166 baseaddr = chainbases[delta]
3150 revaddr = r.start(rev)
3167 revaddr = r.start(rev)
3151 chainbases.append(baseaddr)
3168 chainbases.append(baseaddr)
3152 chainspans.append((revaddr - baseaddr) + size)
3169 chainspans.append((revaddr - baseaddr) + size)
3153 if size == 0:
3170 if size == 0:
3154 numempty += 1
3171 numempty += 1
3155 numemptydelta += 1
3172 numemptydelta += 1
3156 elif r.issnapshot(rev):
3173 elif r.issnapshot(rev):
3157 addsize(size, semisize)
3174 addsize(size, semisize)
3158 numsemi += 1
3175 numsemi += 1
3159 depth = r.snapshotdepth(rev)
3176 depth = r.snapshotdepth(rev)
3160 numsnapdepth[depth] += 1
3177 numsnapdepth[depth] += 1
3161 addsize(size, snapsizedepth[depth])
3178 addsize(size, snapsizedepth[depth])
3162 else:
3179 else:
3163 addsize(size, deltasize)
3180 addsize(size, deltasize)
3164 if delta == rev - 1:
3181 if delta == rev - 1:
3165 numprev += 1
3182 numprev += 1
3166 if delta == p1:
3183 if delta == p1:
3167 nump1prev += 1
3184 nump1prev += 1
3168 elif delta == p2:
3185 elif delta == p2:
3169 nump2prev += 1
3186 nump2prev += 1
3170 elif delta == p1:
3187 elif delta == p1:
3171 nump1 += 1
3188 nump1 += 1
3172 elif delta == p2:
3189 elif delta == p2:
3173 nump2 += 1
3190 nump2 += 1
3174 elif delta != nullrev:
3191 elif delta != nullrev:
3175 numother += 1
3192 numother += 1
3176
3193
3177 # Obtain data on the raw chunks in the revlog.
3194 # Obtain data on the raw chunks in the revlog.
3178 if util.safehasattr(r, b'_getsegmentforrevs'):
3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3179 segment = r._getsegmentforrevs(rev, rev)[1]
3196 segment = r._getsegmentforrevs(rev, rev)[1]
3180 else:
3197 else:
3181 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3182 if segment:
3199 if segment:
3183 chunktype = bytes(segment[0:1])
3200 chunktype = bytes(segment[0:1])
3184 else:
3201 else:
3185 chunktype = b'empty'
3202 chunktype = b'empty'
3186
3203
3187 if chunktype not in chunktypecounts:
3204 if chunktype not in chunktypecounts:
3188 chunktypecounts[chunktype] = 0
3205 chunktypecounts[chunktype] = 0
3189 chunktypesizes[chunktype] = 0
3206 chunktypesizes[chunktype] = 0
3190
3207
3191 chunktypecounts[chunktype] += 1
3208 chunktypecounts[chunktype] += 1
3192 chunktypesizes[chunktype] += size
3209 chunktypesizes[chunktype] += size
3193
3210
3194 # Adjust size min value for empty cases
3211 # Adjust size min value for empty cases
3195 for size in (datasize, fullsize, semisize, deltasize):
3212 for size in (datasize, fullsize, semisize, deltasize):
3196 if size[0] is None:
3213 if size[0] is None:
3197 size[0] = 0
3214 size[0] = 0
3198
3215
3199 numdeltas = numrevs - numfull - numempty - numsemi
3216 numdeltas = numrevs - numfull - numempty - numsemi
3200 numoprev = numprev - nump1prev - nump2prev
3217 numoprev = numprev - nump1prev - nump2prev
3201 totalrawsize = datasize[2]
3218 totalrawsize = datasize[2]
3202 datasize[2] /= numrevs
3219 datasize[2] /= numrevs
3203 fulltotal = fullsize[2]
3220 fulltotal = fullsize[2]
3204 if numfull == 0:
3221 if numfull == 0:
3205 fullsize[2] = 0
3222 fullsize[2] = 0
3206 else:
3223 else:
3207 fullsize[2] /= numfull
3224 fullsize[2] /= numfull
3208 semitotal = semisize[2]
3225 semitotal = semisize[2]
3209 snaptotal = {}
3226 snaptotal = {}
3210 if numsemi > 0:
3227 if numsemi > 0:
3211 semisize[2] /= numsemi
3228 semisize[2] /= numsemi
3212 for depth in snapsizedepth:
3229 for depth in snapsizedepth:
3213 snaptotal[depth] = snapsizedepth[depth][2]
3230 snaptotal[depth] = snapsizedepth[depth][2]
3214 snapsizedepth[depth][2] /= numsnapdepth[depth]
3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3215
3232
3216 deltatotal = deltasize[2]
3233 deltatotal = deltasize[2]
3217 if numdeltas > 0:
3234 if numdeltas > 0:
3218 deltasize[2] /= numdeltas
3235 deltasize[2] /= numdeltas
3219 totalsize = fulltotal + semitotal + deltatotal
3236 totalsize = fulltotal + semitotal + deltatotal
3220 avgchainlen = sum(chainlengths) / numrevs
3237 avgchainlen = sum(chainlengths) / numrevs
3221 maxchainlen = max(chainlengths)
3238 maxchainlen = max(chainlengths)
3222 maxchainspan = max(chainspans)
3239 maxchainspan = max(chainspans)
3223 compratio = 1
3240 compratio = 1
3224 if totalsize:
3241 if totalsize:
3225 compratio = totalrawsize / totalsize
3242 compratio = totalrawsize / totalsize
3226
3243
3227 basedfmtstr = b'%%%dd\n'
3244 basedfmtstr = b'%%%dd\n'
3228 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3229
3246
3230 def dfmtstr(max):
3247 def dfmtstr(max):
3231 return basedfmtstr % len(str(max))
3248 return basedfmtstr % len(str(max))
3232
3249
3233 def pcfmtstr(max, padding=0):
3250 def pcfmtstr(max, padding=0):
3234 return basepcfmtstr % (len(str(max)), b' ' * padding)
3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3235
3252
3236 def pcfmt(value, total):
3253 def pcfmt(value, total):
3237 if total:
3254 if total:
3238 return (value, 100 * float(value) / total)
3255 return (value, 100 * float(value) / total)
3239 else:
3256 else:
3240 return value, 100.0
3257 return value, 100.0
3241
3258
3242 ui.writenoi18n(b'format : %d\n' % format)
3259 ui.writenoi18n(b'format : %d\n' % format)
3243 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3244
3261
3245 ui.write(b'\n')
3262 ui.write(b'\n')
3246 fmt = pcfmtstr(totalsize)
3263 fmt = pcfmtstr(totalsize)
3247 fmt2 = dfmtstr(totalsize)
3264 fmt2 = dfmtstr(totalsize)
3248 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3249 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3250 ui.writenoi18n(
3267 ui.writenoi18n(
3251 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3252 )
3269 )
3253 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3254 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3255 ui.writenoi18n(
3272 ui.writenoi18n(
3256 b' text : '
3273 b' text : '
3257 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3258 )
3275 )
3259 ui.writenoi18n(
3276 ui.writenoi18n(
3260 b' delta : '
3277 b' delta : '
3261 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3262 )
3279 )
3263 ui.writenoi18n(
3280 ui.writenoi18n(
3264 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3265 )
3282 )
3266 for depth in sorted(numsnapdepth):
3283 for depth in sorted(numsnapdepth):
3267 ui.write(
3284 ui.write(
3268 (b' lvl-%-3d : ' % depth)
3285 (b' lvl-%-3d : ' % depth)
3269 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3270 )
3287 )
3271 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3272 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3273 ui.writenoi18n(
3290 ui.writenoi18n(
3274 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3275 )
3292 )
3276 for depth in sorted(numsnapdepth):
3293 for depth in sorted(numsnapdepth):
3277 ui.write(
3294 ui.write(
3278 (b' lvl-%-3d : ' % depth)
3295 (b' lvl-%-3d : ' % depth)
3279 + fmt % pcfmt(snaptotal[depth], totalsize)
3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3280 )
3297 )
3281 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3282
3299
3283 def fmtchunktype(chunktype):
3300 def fmtchunktype(chunktype):
3284 if chunktype == b'empty':
3301 if chunktype == b'empty':
3285 return b' %s : ' % chunktype
3302 return b' %s : ' % chunktype
3286 elif chunktype in pycompat.bytestr(string.ascii_letters):
3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3287 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3288 else:
3305 else:
3289 return b' 0x%s : ' % hex(chunktype)
3306 return b' 0x%s : ' % hex(chunktype)
3290
3307
3291 ui.write(b'\n')
3308 ui.write(b'\n')
3292 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3293 for chunktype in sorted(chunktypecounts):
3310 for chunktype in sorted(chunktypecounts):
3294 ui.write(fmtchunktype(chunktype))
3311 ui.write(fmtchunktype(chunktype))
3295 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3296 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3297 for chunktype in sorted(chunktypecounts):
3314 for chunktype in sorted(chunktypecounts):
3298 ui.write(fmtchunktype(chunktype))
3315 ui.write(fmtchunktype(chunktype))
3299 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3300
3317
3301 ui.write(b'\n')
3318 ui.write(b'\n')
3302 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3303 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3304 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3305 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3306 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3307
3324
3308 if format > 0:
3325 if format > 0:
3309 ui.write(b'\n')
3326 ui.write(b'\n')
3310 ui.writenoi18n(
3327 ui.writenoi18n(
3311 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3312 % tuple(datasize)
3329 % tuple(datasize)
3313 )
3330 )
3314 ui.writenoi18n(
3331 ui.writenoi18n(
3315 b'full revision size (min/max/avg) : %d / %d / %d\n'
3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3316 % tuple(fullsize)
3333 % tuple(fullsize)
3317 )
3334 )
3318 ui.writenoi18n(
3335 ui.writenoi18n(
3319 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3320 % tuple(semisize)
3337 % tuple(semisize)
3321 )
3338 )
3322 for depth in sorted(snapsizedepth):
3339 for depth in sorted(snapsizedepth):
3323 if depth == 0:
3340 if depth == 0:
3324 continue
3341 continue
3325 ui.writenoi18n(
3342 ui.writenoi18n(
3326 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3327 % ((depth,) + tuple(snapsizedepth[depth]))
3344 % ((depth,) + tuple(snapsizedepth[depth]))
3328 )
3345 )
3329 ui.writenoi18n(
3346 ui.writenoi18n(
3330 b'delta size (min/max/avg) : %d / %d / %d\n'
3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3331 % tuple(deltasize)
3348 % tuple(deltasize)
3332 )
3349 )
3333
3350
3334 if numdeltas > 0:
3351 if numdeltas > 0:
3335 ui.write(b'\n')
3352 ui.write(b'\n')
3336 fmt = pcfmtstr(numdeltas)
3353 fmt = pcfmtstr(numdeltas)
3337 fmt2 = pcfmtstr(numdeltas, 4)
3354 fmt2 = pcfmtstr(numdeltas, 4)
3338 ui.writenoi18n(
3355 ui.writenoi18n(
3339 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3340 )
3357 )
3341 if numprev > 0:
3358 if numprev > 0:
3342 ui.writenoi18n(
3359 ui.writenoi18n(
3343 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3344 )
3361 )
3345 ui.writenoi18n(
3362 ui.writenoi18n(
3346 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3347 )
3364 )
3348 ui.writenoi18n(
3365 ui.writenoi18n(
3349 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3350 )
3367 )
3351 if gdelta:
3368 if gdelta:
3352 ui.writenoi18n(
3369 ui.writenoi18n(
3353 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3354 )
3371 )
3355 ui.writenoi18n(
3372 ui.writenoi18n(
3356 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3357 )
3374 )
3358 ui.writenoi18n(
3375 ui.writenoi18n(
3359 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3360 )
3377 )
3361
3378
3362
3379
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # With --debug print full node hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Every id renders at the same width, so measuring the first
        # entry is enough to size the columns.
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents when the entry cannot be read,
                # so a damaged revlog can still be dumped.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3476
3493
3477
3494
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Transformation pipeline, applied in order: each stage consumes the
    # tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when
    # the stage actually changed the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and compare
        # the resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-style listing of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3609
3626
3610
3627
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the stdio-bound SSH transport is implemented so far.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    fd_opt = opts[b'logiofd']
    if fd_opt:
        # Line buffering would be ideal, but binary-mode line buffering is
        # unsupported and warns on Python 3.8+, so the log is opened fully
        # unbuffered. This is not performance-critical code, so that's fine.
        fd = int(fd_opt)
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3659
3676
3660
3677
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # When REV2 is omitted, the second parent is the null revision.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3688
3705
3689
3706
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, so
    # shift it out of the ``file_`` slot.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fixed copy-paste from debugdata: report this command's own
            # name in usage errors.
            raise error.CommandError(
                b'debugsidedata', _(b'invalid arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through filelog-style wrappers to the underlying revlog, which
    # is the object exposing the sidedata() API.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic, diffable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            # Keys are printed in octal (%04o), matching the on-disk format
            # documentation elsewhere in the project.
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3716
3733
3717
3734
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # Fall back to the repository's default pull path.
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only https and ssh URLs make sense here; fill in the scheme's
    # standard port when the URL does not carry one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled: the goal is only
    # to obtain the peer's raw certificate so win32 can inspect the chain.
    # NOTE(review): ssl.wrap_socket is deprecated since Python 3.7 and
    # removed in 3.12 — consider ssl.SSLContext().wrap_socket; confirm
    # against the project's supported Python versions.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # binary_form=True: get the DER bytes, not the parsed dict.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call (build defaults to True) asks Windows Update to
            # fetch the missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3789
3806
3790
3807
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the log-option machinery (--limit, --newest-first, ...) for
    # the listing; force bundle/force keys expected by getremotechanges.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets of one bundle, honoring
        # --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the revision is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision missing locally;
            # report and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style noise while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy bundle format: apply directly.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # First bundle containing the node wins; stop here.
                        break
            else:
                # Listing mode: header with the bundle's mtime, then
                # either the path (--verbose) or a one-line-per-cset view.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3931
3948
3932
3949
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump every subrepo entry of the revision's substate, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3944
3961
3945
3962
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and the (possibly None) repo to the interactive session.
    code.interact(local={'ui': ui, 'repo': repo})
3961
3978
3962
3979
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Rendering helpers; presumably kept as indirections so they can be
    # swapped for hex()/repr during debugging — confirm before relying on it.
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # A successors set is printed indented on one line; an empty
            # set (pruned changeset) yields just the blank line below.
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4017
4034
4018
4035
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    # Read the cached .hgtags filenode for every revision without computing
    # missing entries, so the cache content is shown exactly as stored.
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            display = b'missing'
        elif not fnode:
            # A falsy, non-None cache entry is a corrupt record.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4037
4054
4038
4055
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Command is optionalrepo, so --rev must check for a repo itself.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            # A definition without '=' makes the unpack raise ValueError,
            # funneling into the same malformed-definition error below.
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4102
4119
4103
4120
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # Render a None response (e.g. non-interactive ui) as a marker string
    # so the output line is always printable.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4118
4135
4119
4136
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() produced, verbatim.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4132
4149
4133
4150
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock before rebuilding
    # every cache.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4139
4156
4140
4157
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # -o/--optimize is repeatable; collapse the list into a set of unique
    # optimization names before delegating to the upgrade machinery.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4190
4207
4191
4208
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # With ui.slash on a non-'/' platform, normalize displayed paths;
    # otherwise show them untouched. (Was a named identity lambda before.)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        def displaypath(fn):
            return fn

    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths; generator args avoid building throwaway lists. `path` replaces
    # the old local name `abs`, which shadowed the builtin.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            displaypath(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4218
4235
4219
4236
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Prefix the reason with "hash (phase)" pairs when divergent
        # nodes are reported for this instability.
        dnodes = b''
        if entry.get(b'divergentnodes'):
            pairs = [
                b'%s (%s)' % (c.hex(), c.phasestr())
                for c in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(pairs) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4237
4254
4238
4255
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the command-specific,
        # non-empty values are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4269
4286
4270
4287
def _parsewirelangblocks(fh):
    # Parse the debugwireproto mini language: yield (action, lines) pairs,
    # where an action is an unindented line and its lines are the indented
    # block that follows. A deeper indent continues the previous line;
    # blank lines and '#' comments are ignored.
    action = None
    body = []
    prev_indent = 0

    for raw in fh:
        stripped = raw.rstrip()
        if not stripped or stripped.startswith(b'#'):
            continue

        if not stripped.startswith(b' '):
            # New block: emit the one in progress, then start fresh.
            if action:
                yield action, body
            action = stripped
            body = []
            prev_indent = 0
            continue

        # From here on the line is indented, so it must belong to a block.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        depth = len(stripped) - len(stripped.lstrip())

        # Deeper indentation than the previous line means continuation.
        if depth > prev_indent and body:
            body[-1] += stripped.lstrip()
        else:
            body.append(stripped)
        prev_indent = depth

    # Emit the trailing block, if any.
    if action:
        yield action, body
4311
4328
4312
4329
4313 @command(
4330 @command(
4314 b'debugwireproto',
4331 b'debugwireproto',
4315 [
4332 [
4316 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4317 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4318 (
4335 (
4319 b'',
4336 b'',
4320 b'noreadstderr',
4337 b'noreadstderr',
4321 False,
4338 False,
4322 _(b'do not read from stderr of the remote'),
4339 _(b'do not read from stderr of the remote'),
4323 ),
4340 ),
4324 (
4341 (
4325 b'',
4342 b'',
4326 b'nologhandshake',
4343 b'nologhandshake',
4327 False,
4344 False,
4328 _(b'do not log I/O related to the peer handshake'),
4345 _(b'do not log I/O related to the peer handshake'),
4329 ),
4346 ),
4330 ]
4347 ]
4331 + cmdutil.remoteopts,
4348 + cmdutil.remoteopts,
4332 _(b'[PATH]'),
4349 _(b'[PATH]'),
4333 optionalrepo=True,
4350 optionalrepo=True,
4334 )
4351 )
4335 def debugwireproto(ui, repo, path=None, **opts):
4352 def debugwireproto(ui, repo, path=None, **opts):
4336 """send wire protocol commands to a server
4353 """send wire protocol commands to a server
4337
4354
4338 This command can be used to issue wire protocol commands to remote
4355 This command can be used to issue wire protocol commands to remote
4339 peers and to debug the raw data being exchanged.
4356 peers and to debug the raw data being exchanged.
4340
4357
4341 ``--localssh`` will start an SSH server against the current repository
4358 ``--localssh`` will start an SSH server against the current repository
4342 and connect to that. By default, the connection will perform a handshake
4359 and connect to that. By default, the connection will perform a handshake
4343 and establish an appropriate peer instance.
4360 and establish an appropriate peer instance.
4344
4361
4345 ``--peer`` can be used to bypass the handshake protocol and construct a
4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4346 peer instance using the specified class type. Valid values are ``raw``,
4363 peer instance using the specified class type. Valid values are ``raw``,
4347 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4364 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4348 raw data payloads and don't support higher-level command actions.
4365 raw data payloads and don't support higher-level command actions.
4349
4366
4350 ``--noreadstderr`` can be used to disable automatic reading from stderr
4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4351 of the peer (for SSH connections only). Disabling automatic reading of
4368 of the peer (for SSH connections only). Disabling automatic reading of
4352 stderr is useful for making output more deterministic.
4369 stderr is useful for making output more deterministic.
4353
4370
4354 Commands are issued via a mini language which is specified via stdin.
4371 Commands are issued via a mini language which is specified via stdin.
4355 The language consists of individual actions to perform. An action is
4372 The language consists of individual actions to perform. An action is
4356 defined by a block. A block is defined as a line with no leading
4373 defined by a block. A block is defined as a line with no leading
4357 space followed by 0 or more lines with leading space. Blocks are
4374 space followed by 0 or more lines with leading space. Blocks are
4358 effectively a high-level command with additional metadata.
4375 effectively a high-level command with additional metadata.
4359
4376
4360 Lines beginning with ``#`` are ignored.
4377 Lines beginning with ``#`` are ignored.
4361
4378
4362 The following sections denote available actions.
4379 The following sections denote available actions.
4363
4380
4364 raw
4381 raw
4365 ---
4382 ---
4366
4383
4367 Send raw data to the server.
4384 Send raw data to the server.
4368
4385
4369 The block payload contains the raw data to send as one atomic send
4386 The block payload contains the raw data to send as one atomic send
4370 operation. The data may not actually be delivered in a single system
4387 operation. The data may not actually be delivered in a single system
4371 call: it depends on the abilities of the transport being used.
4388 call: it depends on the abilities of the transport being used.
4372
4389
4373 Each line in the block is de-indented and concatenated. Then, that
4390 Each line in the block is de-indented and concatenated. Then, that
4374 value is evaluated as a Python b'' literal. This allows the use of
4391 value is evaluated as a Python b'' literal. This allows the use of
4375 backslash escaping, etc.
4392 backslash escaping, etc.
4376
4393
4377 raw+
4394 raw+
4378 ----
4395 ----
4379
4396
4380 Behaves like ``raw`` except flushes output afterwards.
4397 Behaves like ``raw`` except flushes output afterwards.
4381
4398
4382 command <X>
4399 command <X>
4383 -----------
4400 -----------
4384
4401
4385 Send a request to run a named command, whose name follows the ``command``
4402 Send a request to run a named command, whose name follows the ``command``
4386 string.
4403 string.
4387
4404
4388 Arguments to the command are defined as lines in this block. The format of
4405 Arguments to the command are defined as lines in this block. The format of
4389 each line is ``<key> <value>``. e.g.::
4406 each line is ``<key> <value>``. e.g.::
4390
4407
4391 command listkeys
4408 command listkeys
4392 namespace bookmarks
4409 namespace bookmarks
4393
4410
4394 If the value begins with ``eval:``, it will be interpreted as a Python
4411 If the value begins with ``eval:``, it will be interpreted as a Python
4395 literal expression. Otherwise values are interpreted as Python b'' literals.
4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4396 This allows sending complex types and encoding special byte sequences via
4413 This allows sending complex types and encoding special byte sequences via
4397 backslash escaping.
4414 backslash escaping.
4398
4415
4399 The following arguments have special meaning:
4416 The following arguments have special meaning:
4400
4417
4401 ``PUSHFILE``
4418 ``PUSHFILE``
4402 When defined, the *push* mechanism of the peer will be used instead
4419 When defined, the *push* mechanism of the peer will be used instead
4403 of the static request-response mechanism and the content of the
4420 of the static request-response mechanism and the content of the
4404 file specified in the value of this argument will be sent as the
4421 file specified in the value of this argument will be sent as the
4405 command payload.
4422 command payload.
4406
4423
4407 This can be used to submit a local bundle file to the remote.
4424 This can be used to submit a local bundle file to the remote.
4408
4425
4409 batchbegin
4426 batchbegin
4410 ----------
4427 ----------
4411
4428
4412 Instruct the peer to begin a batched send.
4429 Instruct the peer to begin a batched send.
4413
4430
4414 All ``command`` blocks are queued for execution until the next
4431 All ``command`` blocks are queued for execution until the next
4415 ``batchsubmit`` block.
4432 ``batchsubmit`` block.
4416
4433
4417 batchsubmit
4434 batchsubmit
4418 -----------
4435 -----------
4419
4436
4420 Submit previously queued ``command`` blocks as a batch request.
4437 Submit previously queued ``command`` blocks as a batch request.
4421
4438
4422 This action MUST be paired with a ``batchbegin`` action.
4439 This action MUST be paired with a ``batchbegin`` action.
4423
4440
4424 httprequest <method> <path>
4441 httprequest <method> <path>
4425 ---------------------------
4442 ---------------------------
4426
4443
4427 (HTTP peer only)
4444 (HTTP peer only)
4428
4445
4429 Send an HTTP request to the peer.
4446 Send an HTTP request to the peer.
4430
4447
4431 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4432
4449
4433 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4434 headers to add to the request. e.g. ``Accept: foo``.
4451 headers to add to the request. e.g. ``Accept: foo``.
4435
4452
4436 The following arguments are special:
4453 The following arguments are special:
4437
4454
4438 ``BODYFILE``
4455 ``BODYFILE``
4439 The content of the file defined as the value to this argument will be
4456 The content of the file defined as the value to this argument will be
4440 transferred verbatim as the HTTP request body.
4457 transferred verbatim as the HTTP request body.
4441
4458
4442 ``frame <type> <flags> <payload>``
4459 ``frame <type> <flags> <payload>``
4443 Send a unified protocol frame as part of the request body.
4460 Send a unified protocol frame as part of the request body.
4444
4461
4445 All frames will be collected and sent as the body to the HTTP
4462 All frames will be collected and sent as the body to the HTTP
4446 request.
4463 request.
4447
4464
4448 close
4465 close
4449 -----
4466 -----
4450
4467
4451 Close the connection to the server.
4468 Close the connection to the server.
4452
4469
4453 flush
4470 flush
4454 -----
4471 -----
4455
4472
4456 Flush data written to the server.
4473 Flush data written to the server.
4457
4474
4458 readavailable
4475 readavailable
4459 -------------
4476 -------------
4460
4477
4461 Close the write end of the connection and read all available data from
4478 Close the write end of the connection and read all available data from
4462 the server.
4479 the server.
4463
4480
4464 If the connection to the server encompasses multiple pipes, we poll both
4481 If the connection to the server encompasses multiple pipes, we poll both
4465 pipes and read available data.
4482 pipes and read available data.
4466
4483
4467 readline
4484 readline
4468 --------
4485 --------
4469
4486
4470 Read a line of output from the server. If there are multiple output
4487 Read a line of output from the server. If there are multiple output
4471 pipes, reads only the main pipe.
4488 pipes, reads only the main pipe.
4472
4489
4473 ereadline
4490 ereadline
4474 ---------
4491 ---------
4475
4492
4476 Like ``readline``, but read from the stderr pipe, if available.
4493 Like ``readline``, but read from the stderr pipe, if available.
4477
4494
4478 read <X>
4495 read <X>
4479 --------
4496 --------
4480
4497
4481 ``read()`` N bytes from the server's main output pipe.
4498 ``read()`` N bytes from the server's main output pipe.
4482
4499
4483 eread <X>
4500 eread <X>
4484 ---------
4501 ---------
4485
4502
4486 ``read()`` N bytes from the server's stderr pipe, if available.
4503 ``read()`` N bytes from the server's stderr pipe, if available.
4487
4504
4488 Specifying Unified Frame-Based Protocol Frames
4505 Specifying Unified Frame-Based Protocol Frames
4489 ----------------------------------------------
4506 ----------------------------------------------
4490
4507
4491 It is possible to emit a *Unified Frame-Based Protocol* by using special
4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4492 syntax.
4509 syntax.
4493
4510
4494 A frame is composed as a type, flags, and payload. These can be parsed
4511 A frame is composed as a type, flags, and payload. These can be parsed
4495 from a string of the form:
4512 from a string of the form:
4496
4513
4497 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4498
4515
4499 ``request-id`` and ``stream-id`` are integers defining the request and
4516 ``request-id`` and ``stream-id`` are integers defining the request and
4500 stream identifiers.
4517 stream identifiers.
4501
4518
4502 ``type`` can be an integer value for the frame type or the string name
4519 ``type`` can be an integer value for the frame type or the string name
4503 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4504 ``command-name``.
4521 ``command-name``.
4505
4522
4506 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4507 components. Each component (and there can be just one) can be an integer
4524 components. Each component (and there can be just one) can be an integer
4508 or a flag name for stream flags or frame flags, respectively. Values are
4525 or a flag name for stream flags or frame flags, respectively. Values are
4509 resolved to integers and then bitwise OR'd together.
4526 resolved to integers and then bitwise OR'd together.
4510
4527
4511 ``payload`` represents the raw frame payload. If it begins with
4528 ``payload`` represents the raw frame payload. If it begins with
4512 ``cbor:``, the following string is evaluated as Python code and the
4529 ``cbor:``, the following string is evaluated as Python code and the
4513 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4514 as a Python byte string literal.
4531 as a Python byte string literal.
4515 """
4532 """
4516 opts = pycompat.byteskwargs(opts)
4533 opts = pycompat.byteskwargs(opts)
4517
4534
4518 if opts[b'localssh'] and not repo:
4535 if opts[b'localssh'] and not repo:
4519 raise error.Abort(_(b'--localssh requires a repository'))
4536 raise error.Abort(_(b'--localssh requires a repository'))
4520
4537
4521 if opts[b'peer'] and opts[b'peer'] not in (
4538 if opts[b'peer'] and opts[b'peer'] not in (
4522 b'raw',
4539 b'raw',
4523 b'http2',
4540 b'http2',
4524 b'ssh1',
4541 b'ssh1',
4525 b'ssh2',
4542 b'ssh2',
4526 ):
4543 ):
4527 raise error.Abort(
4544 raise error.Abort(
4528 _(b'invalid value for --peer'),
4545 _(b'invalid value for --peer'),
4529 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4546 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4530 )
4547 )
4531
4548
4532 if path and opts[b'localssh']:
4549 if path and opts[b'localssh']:
4533 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4550 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4534
4551
4535 if ui.interactive():
4552 if ui.interactive():
4536 ui.write(_(b'(waiting for commands on stdin)\n'))
4553 ui.write(_(b'(waiting for commands on stdin)\n'))
4537
4554
4538 blocks = list(_parsewirelangblocks(ui.fin))
4555 blocks = list(_parsewirelangblocks(ui.fin))
4539
4556
4540 proc = None
4557 proc = None
4541 stdin = None
4558 stdin = None
4542 stdout = None
4559 stdout = None
4543 stderr = None
4560 stderr = None
4544 opener = None
4561 opener = None
4545
4562
4546 if opts[b'localssh']:
4563 if opts[b'localssh']:
4547 # We start the SSH server in its own process so there is process
4564 # We start the SSH server in its own process so there is process
4548 # separation. This prevents a whole class of potential bugs around
4565 # separation. This prevents a whole class of potential bugs around
4549 # shared state from interfering with server operation.
4566 # shared state from interfering with server operation.
4550 args = procutil.hgcmd() + [
4567 args = procutil.hgcmd() + [
4551 b'-R',
4568 b'-R',
4552 repo.root,
4569 repo.root,
4553 b'debugserve',
4570 b'debugserve',
4554 b'--sshstdio',
4571 b'--sshstdio',
4555 ]
4572 ]
4556 proc = subprocess.Popen(
4573 proc = subprocess.Popen(
4557 pycompat.rapply(procutil.tonativestr, args),
4574 pycompat.rapply(procutil.tonativestr, args),
4558 stdin=subprocess.PIPE,
4575 stdin=subprocess.PIPE,
4559 stdout=subprocess.PIPE,
4576 stdout=subprocess.PIPE,
4560 stderr=subprocess.PIPE,
4577 stderr=subprocess.PIPE,
4561 bufsize=0,
4578 bufsize=0,
4562 )
4579 )
4563
4580
4564 stdin = proc.stdin
4581 stdin = proc.stdin
4565 stdout = proc.stdout
4582 stdout = proc.stdout
4566 stderr = proc.stderr
4583 stderr = proc.stderr
4567
4584
4568 # We turn the pipes into observers so we can log I/O.
4585 # We turn the pipes into observers so we can log I/O.
4569 if ui.verbose or opts[b'peer'] == b'raw':
4586 if ui.verbose or opts[b'peer'] == b'raw':
4570 stdin = util.makeloggingfileobject(
4587 stdin = util.makeloggingfileobject(
4571 ui, proc.stdin, b'i', logdata=True
4588 ui, proc.stdin, b'i', logdata=True
4572 )
4589 )
4573 stdout = util.makeloggingfileobject(
4590 stdout = util.makeloggingfileobject(
4574 ui, proc.stdout, b'o', logdata=True
4591 ui, proc.stdout, b'o', logdata=True
4575 )
4592 )
4576 stderr = util.makeloggingfileobject(
4593 stderr = util.makeloggingfileobject(
4577 ui, proc.stderr, b'e', logdata=True
4594 ui, proc.stderr, b'e', logdata=True
4578 )
4595 )
4579
4596
4580 # --localssh also implies the peer connection settings.
4597 # --localssh also implies the peer connection settings.
4581
4598
4582 url = b'ssh://localserver'
4599 url = b'ssh://localserver'
4583 autoreadstderr = not opts[b'noreadstderr']
4600 autoreadstderr = not opts[b'noreadstderr']
4584
4601
4585 if opts[b'peer'] == b'ssh1':
4602 if opts[b'peer'] == b'ssh1':
4586 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4603 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4587 peer = sshpeer.sshv1peer(
4604 peer = sshpeer.sshv1peer(
4588 ui,
4605 ui,
4589 url,
4606 url,
4590 proc,
4607 proc,
4591 stdin,
4608 stdin,
4592 stdout,
4609 stdout,
4593 stderr,
4610 stderr,
4594 None,
4611 None,
4595 autoreadstderr=autoreadstderr,
4612 autoreadstderr=autoreadstderr,
4596 )
4613 )
4597 elif opts[b'peer'] == b'ssh2':
4614 elif opts[b'peer'] == b'ssh2':
4598 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4615 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4599 peer = sshpeer.sshv2peer(
4616 peer = sshpeer.sshv2peer(
4600 ui,
4617 ui,
4601 url,
4618 url,
4602 proc,
4619 proc,
4603 stdin,
4620 stdin,
4604 stdout,
4621 stdout,
4605 stderr,
4622 stderr,
4606 None,
4623 None,
4607 autoreadstderr=autoreadstderr,
4624 autoreadstderr=autoreadstderr,
4608 )
4625 )
4609 elif opts[b'peer'] == b'raw':
4626 elif opts[b'peer'] == b'raw':
4610 ui.write(_(b'using raw connection to peer\n'))
4627 ui.write(_(b'using raw connection to peer\n'))
4611 peer = None
4628 peer = None
4612 else:
4629 else:
4613 ui.write(_(b'creating ssh peer from handshake results\n'))
4630 ui.write(_(b'creating ssh peer from handshake results\n'))
4614 peer = sshpeer.makepeer(
4631 peer = sshpeer.makepeer(
4615 ui,
4632 ui,
4616 url,
4633 url,
4617 proc,
4634 proc,
4618 stdin,
4635 stdin,
4619 stdout,
4636 stdout,
4620 stderr,
4637 stderr,
4621 autoreadstderr=autoreadstderr,
4638 autoreadstderr=autoreadstderr,
4622 )
4639 )
4623
4640
4624 elif path:
4641 elif path:
4625 # We bypass hg.peer() so we can proxy the sockets.
4642 # We bypass hg.peer() so we can proxy the sockets.
4626 # TODO consider not doing this because we skip
4643 # TODO consider not doing this because we skip
4627 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4644 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4628 u = urlutil.url(path)
4645 u = urlutil.url(path)
4629 if u.scheme != b'http':
4646 if u.scheme != b'http':
4630 raise error.Abort(_(b'only http:// paths are currently supported'))
4647 raise error.Abort(_(b'only http:// paths are currently supported'))
4631
4648
4632 url, authinfo = u.authinfo()
4649 url, authinfo = u.authinfo()
4633 openerargs = {
4650 openerargs = {
4634 'useragent': b'Mercurial debugwireproto',
4651 'useragent': b'Mercurial debugwireproto',
4635 }
4652 }
4636
4653
4637 # Turn pipes/sockets into observers so we can log I/O.
4654 # Turn pipes/sockets into observers so we can log I/O.
4638 if ui.verbose:
4655 if ui.verbose:
4639 openerargs.update(
4656 openerargs.update(
4640 {
4657 {
4641 'loggingfh': ui,
4658 'loggingfh': ui,
4642 'loggingname': b's',
4659 'loggingname': b's',
4643 'loggingopts': {
4660 'loggingopts': {
4644 'logdata': True,
4661 'logdata': True,
4645 'logdataapis': False,
4662 'logdataapis': False,
4646 },
4663 },
4647 }
4664 }
4648 )
4665 )
4649
4666
4650 if ui.debugflag:
4667 if ui.debugflag:
4651 openerargs['loggingopts']['logdataapis'] = True
4668 openerargs['loggingopts']['logdataapis'] = True
4652
4669
4653 # Don't send default headers when in raw mode. This allows us to
4670 # Don't send default headers when in raw mode. This allows us to
4654 # bypass most of the behavior of our URL handling code so we can
4671 # bypass most of the behavior of our URL handling code so we can
4655 # have near complete control over what's sent on the wire.
4672 # have near complete control over what's sent on the wire.
4656 if opts[b'peer'] == b'raw':
4673 if opts[b'peer'] == b'raw':
4657 openerargs['sendaccept'] = False
4674 openerargs['sendaccept'] = False
4658
4675
4659 opener = urlmod.opener(ui, authinfo, **openerargs)
4676 opener = urlmod.opener(ui, authinfo, **openerargs)
4660
4677
4661 if opts[b'peer'] == b'http2':
4678 if opts[b'peer'] == b'http2':
4662 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4679 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4663 # We go through makepeer() because we need an API descriptor for
4680 # We go through makepeer() because we need an API descriptor for
4664 # the peer instance to be useful.
4681 # the peer instance to be useful.
4665 maybe_silent = (
4682 maybe_silent = (
4666 ui.silent()
4683 ui.silent()
4667 if opts[b'nologhandshake']
4684 if opts[b'nologhandshake']
4668 else util.nullcontextmanager()
4685 else util.nullcontextmanager()
4669 )
4686 )
4670 with maybe_silent, ui.configoverride(
4687 with maybe_silent, ui.configoverride(
4671 {(b'experimental', b'httppeer.advertise-v2'): True}
4688 {(b'experimental', b'httppeer.advertise-v2'): True}
4672 ):
4689 ):
4673 peer = httppeer.makepeer(ui, path, opener=opener)
4690 peer = httppeer.makepeer(ui, path, opener=opener)
4674
4691
4675 if not isinstance(peer, httppeer.httpv2peer):
4692 if not isinstance(peer, httppeer.httpv2peer):
4676 raise error.Abort(
4693 raise error.Abort(
4677 _(
4694 _(
4678 b'could not instantiate HTTP peer for '
4695 b'could not instantiate HTTP peer for '
4679 b'wire protocol version 2'
4696 b'wire protocol version 2'
4680 ),
4697 ),
4681 hint=_(
4698 hint=_(
4682 b'the server may not have the feature '
4699 b'the server may not have the feature '
4683 b'enabled or is not allowing this '
4700 b'enabled or is not allowing this '
4684 b'client version'
4701 b'client version'
4685 ),
4702 ),
4686 )
4703 )
4687
4704
4688 elif opts[b'peer'] == b'raw':
4705 elif opts[b'peer'] == b'raw':
4689 ui.write(_(b'using raw connection to peer\n'))
4706 ui.write(_(b'using raw connection to peer\n'))
4690 peer = None
4707 peer = None
4691 elif opts[b'peer']:
4708 elif opts[b'peer']:
4692 raise error.Abort(
4709 raise error.Abort(
4693 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4710 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4694 )
4711 )
4695 else:
4712 else:
4696 peer = httppeer.makepeer(ui, path, opener=opener)
4713 peer = httppeer.makepeer(ui, path, opener=opener)
4697
4714
4698 # We /could/ populate stdin/stdout with sock.makefile()...
4715 # We /could/ populate stdin/stdout with sock.makefile()...
4699 else:
4716 else:
4700 raise error.Abort(_(b'unsupported connection configuration'))
4717 raise error.Abort(_(b'unsupported connection configuration'))
4701
4718
4702 batchedcommands = None
4719 batchedcommands = None
4703
4720
4704 # Now perform actions based on the parsed wire language instructions.
4721 # Now perform actions based on the parsed wire language instructions.
4705 for action, lines in blocks:
4722 for action, lines in blocks:
4706 if action in (b'raw', b'raw+'):
4723 if action in (b'raw', b'raw+'):
4707 if not stdin:
4724 if not stdin:
4708 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4725 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4709
4726
4710 # Concatenate the data together.
4727 # Concatenate the data together.
4711 data = b''.join(l.lstrip() for l in lines)
4728 data = b''.join(l.lstrip() for l in lines)
4712 data = stringutil.unescapestr(data)
4729 data = stringutil.unescapestr(data)
4713 stdin.write(data)
4730 stdin.write(data)
4714
4731
4715 if action == b'raw+':
4732 if action == b'raw+':
4716 stdin.flush()
4733 stdin.flush()
4717 elif action == b'flush':
4734 elif action == b'flush':
4718 if not stdin:
4735 if not stdin:
4719 raise error.Abort(_(b'cannot call flush on this peer'))
4736 raise error.Abort(_(b'cannot call flush on this peer'))
4720 stdin.flush()
4737 stdin.flush()
4721 elif action.startswith(b'command'):
4738 elif action.startswith(b'command'):
4722 if not peer:
4739 if not peer:
4723 raise error.Abort(
4740 raise error.Abort(
4724 _(
4741 _(
4725 b'cannot send commands unless peer instance '
4742 b'cannot send commands unless peer instance '
4726 b'is available'
4743 b'is available'
4727 )
4744 )
4728 )
4745 )
4729
4746
4730 command = action.split(b' ', 1)[1]
4747 command = action.split(b' ', 1)[1]
4731
4748
4732 args = {}
4749 args = {}
4733 for line in lines:
4750 for line in lines:
4734 # We need to allow empty values.
4751 # We need to allow empty values.
4735 fields = line.lstrip().split(b' ', 1)
4752 fields = line.lstrip().split(b' ', 1)
4736 if len(fields) == 1:
4753 if len(fields) == 1:
4737 key = fields[0]
4754 key = fields[0]
4738 value = b''
4755 value = b''
4739 else:
4756 else:
4740 key, value = fields
4757 key, value = fields
4741
4758
4742 if value.startswith(b'eval:'):
4759 if value.startswith(b'eval:'):
4743 value = stringutil.evalpythonliteral(value[5:])
4760 value = stringutil.evalpythonliteral(value[5:])
4744 else:
4761 else:
4745 value = stringutil.unescapestr(value)
4762 value = stringutil.unescapestr(value)
4746
4763
4747 args[key] = value
4764 args[key] = value
4748
4765
4749 if batchedcommands is not None:
4766 if batchedcommands is not None:
4750 batchedcommands.append((command, args))
4767 batchedcommands.append((command, args))
4751 continue
4768 continue
4752
4769
4753 ui.status(_(b'sending %s command\n') % command)
4770 ui.status(_(b'sending %s command\n') % command)
4754
4771
4755 if b'PUSHFILE' in args:
4772 if b'PUSHFILE' in args:
4756 with open(args[b'PUSHFILE'], 'rb') as fh:
4773 with open(args[b'PUSHFILE'], 'rb') as fh:
4757 del args[b'PUSHFILE']
4774 del args[b'PUSHFILE']
4758 res, output = peer._callpush(
4775 res, output = peer._callpush(
4759 command, fh, **pycompat.strkwargs(args)
4776 command, fh, **pycompat.strkwargs(args)
4760 )
4777 )
4761 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4778 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4762 ui.status(
4779 ui.status(
4763 _(b'remote output: %s\n') % stringutil.escapestr(output)
4780 _(b'remote output: %s\n') % stringutil.escapestr(output)
4764 )
4781 )
4765 else:
4782 else:
4766 with peer.commandexecutor() as e:
4783 with peer.commandexecutor() as e:
4767 res = e.callcommand(command, args).result()
4784 res = e.callcommand(command, args).result()
4768
4785
4769 if isinstance(res, wireprotov2peer.commandresponse):
4786 if isinstance(res, wireprotov2peer.commandresponse):
4770 val = res.objects()
4787 val = res.objects()
4771 ui.status(
4788 ui.status(
4772 _(b'response: %s\n')
4789 _(b'response: %s\n')
4773 % stringutil.pprint(val, bprefix=True, indent=2)
4790 % stringutil.pprint(val, bprefix=True, indent=2)
4774 )
4791 )
4775 else:
4792 else:
4776 ui.status(
4793 ui.status(
4777 _(b'response: %s\n')
4794 _(b'response: %s\n')
4778 % stringutil.pprint(res, bprefix=True, indent=2)
4795 % stringutil.pprint(res, bprefix=True, indent=2)
4779 )
4796 )
4780
4797
4781 elif action == b'batchbegin':
4798 elif action == b'batchbegin':
4782 if batchedcommands is not None:
4799 if batchedcommands is not None:
4783 raise error.Abort(_(b'nested batchbegin not allowed'))
4800 raise error.Abort(_(b'nested batchbegin not allowed'))
4784
4801
4785 batchedcommands = []
4802 batchedcommands = []
4786 elif action == b'batchsubmit':
4803 elif action == b'batchsubmit':
4787 # There is a batching API we could go through. But it would be
4804 # There is a batching API we could go through. But it would be
4788 # difficult to normalize requests into function calls. It is easier
4805 # difficult to normalize requests into function calls. It is easier
4789 # to bypass this layer and normalize to commands + args.
4806 # to bypass this layer and normalize to commands + args.
4790 ui.status(
4807 ui.status(
4791 _(b'sending batch with %d sub-commands\n')
4808 _(b'sending batch with %d sub-commands\n')
4792 % len(batchedcommands)
4809 % len(batchedcommands)
4793 )
4810 )
4794 assert peer is not None
4811 assert peer is not None
4795 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4812 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4796 ui.status(
4813 ui.status(
4797 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4814 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4798 )
4815 )
4799
4816
4800 batchedcommands = None
4817 batchedcommands = None
4801
4818
4802 elif action.startswith(b'httprequest '):
4819 elif action.startswith(b'httprequest '):
4803 if not opener:
4820 if not opener:
4804 raise error.Abort(
4821 raise error.Abort(
4805 _(b'cannot use httprequest without an HTTP peer')
4822 _(b'cannot use httprequest without an HTTP peer')
4806 )
4823 )
4807
4824
4808 request = action.split(b' ', 2)
4825 request = action.split(b' ', 2)
4809 if len(request) != 3:
4826 if len(request) != 3:
4810 raise error.Abort(
4827 raise error.Abort(
4811 _(
4828 _(
4812 b'invalid httprequest: expected format is '
4829 b'invalid httprequest: expected format is '
4813 b'"httprequest <method> <path>'
4830 b'"httprequest <method> <path>'
4814 )
4831 )
4815 )
4832 )
4816
4833
4817 method, httppath = request[1:]
4834 method, httppath = request[1:]
4818 headers = {}
4835 headers = {}
4819 body = None
4836 body = None
4820 frames = []
4837 frames = []
4821 for line in lines:
4838 for line in lines:
4822 line = line.lstrip()
4839 line = line.lstrip()
4823 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4840 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4824 if m:
4841 if m:
4825 # Headers need to use native strings.
4842 # Headers need to use native strings.
4826 key = pycompat.strurl(m.group(1))
4843 key = pycompat.strurl(m.group(1))
4827 value = pycompat.strurl(m.group(2))
4844 value = pycompat.strurl(m.group(2))
4828 headers[key] = value
4845 headers[key] = value
4829 continue
4846 continue
4830
4847
4831 if line.startswith(b'BODYFILE '):
4848 if line.startswith(b'BODYFILE '):
4832 with open(line.split(b' ', 1), b'rb') as fh:
4849 with open(line.split(b' ', 1), b'rb') as fh:
4833 body = fh.read()
4850 body = fh.read()
4834 elif line.startswith(b'frame '):
4851 elif line.startswith(b'frame '):
4835 frame = wireprotoframing.makeframefromhumanstring(
4852 frame = wireprotoframing.makeframefromhumanstring(
4836 line[len(b'frame ') :]
4853 line[len(b'frame ') :]
4837 )
4854 )
4838
4855
4839 frames.append(frame)
4856 frames.append(frame)
4840 else:
4857 else:
4841 raise error.Abort(
4858 raise error.Abort(
4842 _(b'unknown argument to httprequest: %s') % line
4859 _(b'unknown argument to httprequest: %s') % line
4843 )
4860 )
4844
4861
4845 url = path + httppath
4862 url = path + httppath
4846
4863
4847 if frames:
4864 if frames:
4848 body = b''.join(bytes(f) for f in frames)
4865 body = b''.join(bytes(f) for f in frames)
4849
4866
4850 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4867 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4851
4868
4852 # urllib.Request insists on using has_data() as a proxy for
4869 # urllib.Request insists on using has_data() as a proxy for
4853 # determining the request method. Override that to use our
4870 # determining the request method. Override that to use our
4854 # explicitly requested method.
4871 # explicitly requested method.
4855 req.get_method = lambda: pycompat.sysstr(method)
4872 req.get_method = lambda: pycompat.sysstr(method)
4856
4873
4857 try:
4874 try:
4858 res = opener.open(req)
4875 res = opener.open(req)
4859 body = res.read()
4876 body = res.read()
4860 except util.urlerr.urlerror as e:
4877 except util.urlerr.urlerror as e:
4861 # read() method must be called, but only exists in Python 2
4878 # read() method must be called, but only exists in Python 2
4862 getattr(e, 'read', lambda: None)()
4879 getattr(e, 'read', lambda: None)()
4863 continue
4880 continue
4864
4881
4865 ct = res.headers.get('Content-Type')
4882 ct = res.headers.get('Content-Type')
4866 if ct == 'application/mercurial-cbor':
4883 if ct == 'application/mercurial-cbor':
4867 ui.write(
4884 ui.write(
4868 _(b'cbor> %s\n')
4885 _(b'cbor> %s\n')
4869 % stringutil.pprint(
4886 % stringutil.pprint(
4870 cborutil.decodeall(body), bprefix=True, indent=2
4887 cborutil.decodeall(body), bprefix=True, indent=2
4871 )
4888 )
4872 )
4889 )
4873
4890
4874 elif action == b'close':
4891 elif action == b'close':
4875 assert peer is not None
4892 assert peer is not None
4876 peer.close()
4893 peer.close()
4877 elif action == b'readavailable':
4894 elif action == b'readavailable':
4878 if not stdout or not stderr:
4895 if not stdout or not stderr:
4879 raise error.Abort(
4896 raise error.Abort(
4880 _(b'readavailable not available on this peer')
4897 _(b'readavailable not available on this peer')
4881 )
4898 )
4882
4899
4883 stdin.close()
4900 stdin.close()
4884 stdout.read()
4901 stdout.read()
4885 stderr.read()
4902 stderr.read()
4886
4903
4887 elif action == b'readline':
4904 elif action == b'readline':
4888 if not stdout:
4905 if not stdout:
4889 raise error.Abort(_(b'readline not available on this peer'))
4906 raise error.Abort(_(b'readline not available on this peer'))
4890 stdout.readline()
4907 stdout.readline()
4891 elif action == b'ereadline':
4908 elif action == b'ereadline':
4892 if not stderr:
4909 if not stderr:
4893 raise error.Abort(_(b'ereadline not available on this peer'))
4910 raise error.Abort(_(b'ereadline not available on this peer'))
4894 stderr.readline()
4911 stderr.readline()
4895 elif action.startswith(b'read '):
4912 elif action.startswith(b'read '):
4896 count = int(action.split(b' ', 1)[1])
4913 count = int(action.split(b' ', 1)[1])
4897 if not stdout:
4914 if not stdout:
4898 raise error.Abort(_(b'read not available on this peer'))
4915 raise error.Abort(_(b'read not available on this peer'))
4899 stdout.read(count)
4916 stdout.read(count)
4900 elif action.startswith(b'eread '):
4917 elif action.startswith(b'eread '):
4901 count = int(action.split(b' ', 1)[1])
4918 count = int(action.split(b' ', 1)[1])
4902 if not stderr:
4919 if not stderr:
4903 raise error.Abort(_(b'eread not available on this peer'))
4920 raise error.Abort(_(b'eread not available on this peer'))
4904 stderr.read(count)
4921 stderr.read(count)
4905 else:
4922 else:
4906 raise error.Abort(_(b'unknown action: %s') % action)
4923 raise error.Abort(_(b'unknown action: %s') % action)
4907
4924
4908 if batchedcommands is not None:
4925 if batchedcommands is not None:
4909 raise error.Abort(_(b'unclosed "batchbegin" request'))
4926 raise error.Abort(_(b'unclosed "batchbegin" request'))
4910
4927
4911 if peer:
4928 if peer:
4912 peer.close()
4929 peer.close()
4913
4930
4914 if proc:
4931 if proc:
4915 proc.kill()
4932 proc.kill()
@@ -1,747 +1,755
1 # censor code related to censoring revision
1 # censor code related to censoring revision
2 # coding: utf8
2 # coding: utf8
3 #
3 #
4 # Copyright 2021 Pierre-Yves David <pierre-yves.david@octobus.net>
4 # Copyright 2021 Pierre-Yves David <pierre-yves.david@octobus.net>
5 # Copyright 2015 Google, Inc <martinvonz@google.com>
5 # Copyright 2015 Google, Inc <martinvonz@google.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 import binascii
10 import binascii
11 import contextlib
11 import contextlib
12 import os
12 import os
13 import struct
13 import struct
14
14
15 from ..node import (
15 from ..node import (
16 nullrev,
16 nullrev,
17 )
17 )
18 from .constants import (
18 from .constants import (
19 COMP_MODE_PLAIN,
19 COMP_MODE_PLAIN,
20 ENTRY_DATA_COMPRESSED_LENGTH,
20 ENTRY_DATA_COMPRESSED_LENGTH,
21 ENTRY_DATA_COMPRESSION_MODE,
21 ENTRY_DATA_COMPRESSION_MODE,
22 ENTRY_DATA_OFFSET,
22 ENTRY_DATA_OFFSET,
23 ENTRY_DATA_UNCOMPRESSED_LENGTH,
23 ENTRY_DATA_UNCOMPRESSED_LENGTH,
24 ENTRY_DELTA_BASE,
24 ENTRY_DELTA_BASE,
25 ENTRY_LINK_REV,
25 ENTRY_LINK_REV,
26 ENTRY_NODE_ID,
26 ENTRY_NODE_ID,
27 ENTRY_PARENT_1,
27 ENTRY_PARENT_1,
28 ENTRY_PARENT_2,
28 ENTRY_PARENT_2,
29 ENTRY_SIDEDATA_COMPRESSED_LENGTH,
29 ENTRY_SIDEDATA_COMPRESSED_LENGTH,
30 ENTRY_SIDEDATA_COMPRESSION_MODE,
30 ENTRY_SIDEDATA_COMPRESSION_MODE,
31 ENTRY_SIDEDATA_OFFSET,
31 ENTRY_SIDEDATA_OFFSET,
32 REVLOGV0,
32 REVLOGV0,
33 REVLOGV1,
33 REVLOGV1,
34 )
34 )
35 from ..i18n import _
35 from ..i18n import _
36
36
37 from .. import (
37 from .. import (
38 error,
38 error,
39 pycompat,
39 pycompat,
40 revlogutils,
40 revlogutils,
41 util,
41 util,
42 )
42 )
43 from ..utils import (
43 from ..utils import (
44 storageutil,
44 storageutil,
45 )
45 )
46 from . import (
46 from . import (
47 constants,
47 constants,
48 deltas,
48 deltas,
49 )
49 )
50
50
51
51
def v1_censor(rl, tr, censornode, tombstone=b''):
    """censors a revision in a "version 1" revlog

    ``rl`` is the revlog to censor, ``tr`` the active transaction,
    ``censornode`` the node of the revision to censor and ``tombstone`` the
    replacement text stored in place of the censored content.
    """
    assert rl._format_version == constants.REVLOGV1, rl._format_version

    # avoid cycle
    from .. import revlog

    censorrev = rl.rev(censornode)
    tombstone = storageutil.packmeta({b'censored': tombstone}, b'')

    # Rewriting the revlog in place is hard. Our strategy for censoring is
    # to create a new revlog, copy all revisions to it, then replace the
    # revlogs on transaction close.
    #
    # This is a bit dangerous. We could easily have a mismatch of state.
    newrl = revlog.revlog(
        rl.opener,
        target=rl.target,
        radix=rl.radix,
        postfix=b'tmpcensored',
        censorable=True,
    )
    # mirror the source revlog's format/behavior knobs on the temporary one
    newrl._format_version = rl._format_version
    newrl._format_flags = rl._format_flags
    newrl._generaldelta = rl._generaldelta
    newrl._parse_index = rl._parse_index

    for rev in rl.revs():
        node = rl.node(rev)
        p1, p2 = rl.parents(node)

        if rev == censorrev:
            # store the tombstone instead of the original content, flagged
            # as censored
            newrl.addrawrevision(
                tombstone,
                tr,
                rl.linkrev(censorrev),
                p1,
                p2,
                censornode,
                constants.REVIDX_ISCENSORED,
            )

            # The tombstone must be stored as a full snapshot; a censored
            # revision stored as a delta is not supported.
            if newrl.deltaparent(rev) != nullrev:
                m = _(b'censored revision stored as delta; cannot censor')
                h = _(
                    b'censoring of revlogs is not fully implemented;'
                    b' please report this bug'
                )
                raise error.Abort(m, hint=h)
            continue

        if rl.iscensored(rev):
            # an already-censored revision must also be a full snapshot;
            # copy its raw chunk verbatim (rawdata() would fail hash checks)
            if rl.deltaparent(rev) != nullrev:
                m = _(
                    b'cannot censor due to censored '
                    b'revision having delta stored'
                )
                raise error.Abort(m)
            rawtext = rl._chunk(rev)
        else:
            rawtext = rl.rawdata(rev)

        newrl.addrawrevision(
            rawtext, tr, rl.linkrev(rev), p1, p2, node, rl.flags(rev)
        )

    # back up the old files so the transaction can roll this back
    tr.addbackup(rl._indexfile, location=b'store')
    if not rl._inline:
        tr.addbackup(rl._datafile, location=b'store')

    # swap the rewritten revlog into place
    rl.opener.rename(newrl._indexfile, rl._indexfile)
    if not rl._inline:
        rl.opener.rename(newrl._datafile, rl._datafile)

    # drop any cached state derived from the pre-censor files
    rl.clearcaches()
    rl._loadindex()
128
128
129
129
def v2_censor(revlog, tr, censornode, tombstone=b''):
    """censors a revision in a "version 2" revlog"""
    # v2 rewriting does not apply to the two historical on-disk formats
    for unsupported in (REVLOGV0, REVLOGV1):
        assert revlog._format_version != unsupported, revlog._format_version

    # The generic rewrite helper operates on a set of revisions; censoring
    # one node is simply the singleton case.
    _rewrite_v2(revlog, tr, {revlog.rev(censornode)}, tombstone)
137
137
138
138
def _rewrite_v2(revlog, tr, censor_revs, tombstone=b''):
    """rewrite a revlog to censor some of its content

    General principle

    We create new revlog files (index/data/sidedata) to copy the content of
    the existing data without the censored data.

    We need to recompute new delta for any revision that used the censored
    revision as delta base. As the cumulative size of the new delta may be
    large, we store them in a temporary file until they are stored in their
    final destination.

    All data before the censored data can be blindly copied. The rest needs
    to be copied as we go and the associated index entry needs adjustment.
    """
    assert revlog._format_version != REVLOGV0, revlog._format_version
    assert revlog._format_version != REVLOGV1, revlog._format_version

    old_index = revlog.index
    docket = revlog._docket

    tombstone = storageutil.packmeta({b'censored': tombstone}, b'')

    # everything strictly before the first censored revision is copied
    # verbatim; compute where that "as-is" prefix ends in each file
    first_excl_rev = min(censor_revs)

    first_excl_entry = revlog.index[first_excl_rev]
    index_cutoff = revlog.index.entry_size * first_excl_rev
    # the low 16 bits of ENTRY_DATA_OFFSET carry the flags; shift them away
    # to get the actual byte offset in the data file
    data_cutoff = first_excl_entry[ENTRY_DATA_OFFSET] >> 16
    sidedata_cutoff = revlog.sidedata_cut_off(first_excl_rev)

    with pycompat.unnamedtempfile(mode=b"w+b") as tmp_storage:
        # rev → (new_base, data_start, data_end, compression_mode)
        rewritten_entries = _precompute_rewritten_delta(
            revlog,
            old_index,
            censor_revs,
            tmp_storage,
        )

        all_files = _setup_new_files(
            revlog,
            index_cutoff,
            data_cutoff,
            sidedata_cutoff,
        )

        # we dont need to open the old index file since its content already
        # exist in a usable form in `old_index`.
        with all_files() as open_files:
            (
                old_data_file,
                old_sidedata_file,
                new_index_file,
                new_data_file,
                new_sidedata_file,
            ) = open_files

            # writing the censored revision

            # Writing all subsequent revisions
            for rev in range(first_excl_rev, len(old_index)):
                if rev in censor_revs:
                    _rewrite_censor(
                        revlog,
                        old_index,
                        open_files,
                        rev,
                        tombstone,
                    )
                else:
                    _rewrite_simple(
                        revlog,
                        old_index,
                        open_files,
                        rev,
                        rewritten_entries,
                        tmp_storage,
                    )
        # persist the updated docket; no transaction since files were
        # rewritten directly
        docket.write(transaction=None, stripping=True)
219
219
220
220
def _precompute_rewritten_delta(
    revlog,
    old_index,
    excluded_revs,
    tmp_storage,
):
    """Compute new delta for revisions whose delta is based on revision that
    will not survive as is.

    The recomputed (possibly compressed) delta payloads are written to
    ``tmp_storage`` so they can be copied into the new data file later.

    Return a mapping: {rev → (new_base, data_start, data_end, compression_mode)}
    """
    dc = deltas.deltacomputer(revlog)
    rewritten_entries = {}
    first_excl_rev = min(excluded_revs)
    with revlog._segmentfile._open_read() as dfh:
        for rev in range(first_excl_rev, len(old_index)):
            if rev in excluded_revs:
                # this revision is one of the excluded (censored) ones; it
                # is rewritten wholesale elsewhere, so we don't need to
                # consider recomputing a delta for it.
                continue
            entry = old_index[rev]
            if entry[ENTRY_DELTA_BASE] not in excluded_revs:
                continue
            # This is a revision that use the censored revision as the base
            # for its delta. We need to compute a new delta for it.
            if entry[ENTRY_DATA_UNCOMPRESSED_LENGTH] == 0:
                # this revision is empty, we can delta against nullrev
                rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
            else:

                # rebuild the full text, then search for a delta whose base
                # is not one of the excluded revisions
                text = revlog.rawdata(rev, _df=dfh)
                info = revlogutils.revisioninfo(
                    node=entry[ENTRY_NODE_ID],
                    p1=revlog.node(entry[ENTRY_PARENT_1]),
                    p2=revlog.node(entry[ENTRY_PARENT_2]),
                    btext=[text],
                    textlen=len(text),
                    cachedelta=None,
                    flags=entry[ENTRY_DATA_OFFSET] & 0xFFFF,
                )
                d = dc.finddeltainfo(
                    info, dfh, excluded_bases=excluded_revs, target_rev=rev
                )
                default_comp = revlog._docket.default_compression_header
                comp_mode, d = deltas.delta_compression(default_comp, d)
                # using `tell` is a bit lazy, but we are not here for speed
                start = tmp_storage.tell()
                tmp_storage.write(d.data[1])
                end = tmp_storage.tell()
                rewritten_entries[rev] = (d.base, start, end, comp_mode)
    return rewritten_entries
272
272
273
273
def _setup_new_files(
    revlog,
    index_cutoff,
    data_cutoff,
    sidedata_cutoff,
):
    """
    Prepare the replacement revlog files, pre-filled with the untouched
    prefix (up to each ``*_cutoff`` byte offset) of the old ones, and

    return a context manager to open all the relevant files:
    - old_data_file,
    - old_sidedata_file,
    - new_index_file,
    - new_data_file,
    - new_sidedata_file,

    The old_index_file is not here because it is accessed through the
    `old_index` object of the caller function.
    """
    docket = revlog._docket
    old_index_filepath = revlog.opener.join(docket.index_filepath())
    old_data_filepath = revlog.opener.join(docket.data_filepath())
    old_sidedata_filepath = revlog.opener.join(docket.sidedata_filepath())

    new_index_filepath = revlog.opener.join(docket.new_index_file())
    new_data_filepath = revlog.opener.join(docket.new_data_file())
    new_sidedata_filepath = revlog.opener.join(docket.new_sidedata_file())

    # copy the unaffected prefix of each file into its replacement
    util.copyfile(old_index_filepath, new_index_filepath, nb_bytes=index_cutoff)
    util.copyfile(old_data_filepath, new_data_filepath, nb_bytes=data_cutoff)
    util.copyfile(
        old_sidedata_filepath,
        new_sidedata_filepath,
        nb_bytes=sidedata_cutoff,
    )
    revlog.opener.register_file(docket.index_filepath())
    revlog.opener.register_file(docket.data_filepath())
    revlog.opener.register_file(docket.sidedata_filepath())

    # truncate the docket's view of each file to the copied prefix; the
    # rewrite will append from there
    docket.index_end = index_cutoff
    docket.data_end = data_cutoff
    docket.sidedata_end = sidedata_cutoff

    # reload the revlog internal information
    revlog.clearcaches()
    revlog._loadindex(docket=docket)

    @contextlib.contextmanager
    def all_files_opener():
        # hide opening in an helper function to please check-code, black
        # and various python version at the same time
        with open(old_data_filepath, 'rb') as old_data_file:
            with open(old_sidedata_filepath, 'rb') as old_sidedata_file:
                with open(new_index_filepath, 'r+b') as new_index_file:
                    with open(new_data_filepath, 'r+b') as new_data_file:
                        with open(
                            new_sidedata_filepath, 'r+b'
                        ) as new_sidedata_file:
                            # position every new file at its end and sanity
                            # check that it matches the expected cutoff
                            new_index_file.seek(0, os.SEEK_END)
                            assert new_index_file.tell() == index_cutoff
                            new_data_file.seek(0, os.SEEK_END)
                            assert new_data_file.tell() == data_cutoff
                            new_sidedata_file.seek(0, os.SEEK_END)
                            assert new_sidedata_file.tell() == sidedata_cutoff
                            yield (
                                old_data_file,
                                old_sidedata_file,
                                new_index_file,
                                new_data_file,
                                new_sidedata_file,
                            )

    return all_files_opener
346
346
347
347
def _rewrite_simple(
    revlog,
    old_index,
    all_files,
    rev,
    rewritten_entries,
    tmp_storage,
):
    """append a normal revision to the index after the rewritten one(s)

    ``rewritten_entries`` maps revisions whose delta was recomputed to
    ``(new_base, data_start, data_end, compression_mode)`` tuples pointing
    into ``tmp_storage``.
    """
    (
        old_data_file,
        old_sidedata_file,
        new_index_file,
        new_data_file,
        new_sidedata_file,
    ) = all_files
    entry = old_index[rev]
    # ENTRY_DATA_OFFSET packs the flags in its low 16 bits and the byte
    # offset in the remaining high bits
    flags = entry[ENTRY_DATA_OFFSET] & 0xFFFF
    old_data_offset = entry[ENTRY_DATA_OFFSET] >> 16

    if rev not in rewritten_entries:
        # delta base survived: copy the stored payload verbatim
        old_data_file.seek(old_data_offset)
        new_data_size = entry[ENTRY_DATA_COMPRESSED_LENGTH]
        new_data = old_data_file.read(new_data_size)
        data_delta_base = entry[ENTRY_DELTA_BASE]
        d_comp_mode = entry[ENTRY_DATA_COMPRESSION_MODE]
    else:
        # delta was recomputed earlier; fetch it from the temporary file
        (
            data_delta_base,
            start,
            end,
            d_comp_mode,
        ) = rewritten_entries[rev]
        new_data_size = end - start
        tmp_storage.seek(start)
        new_data = tmp_storage.read(new_data_size)

    # It might be faster to group continuous read/write operation,
    # however, this is censor, an operation that is not focussed
    # around stellar performance. So I have not written this
    # optimisation yet.
    new_data_offset = new_data_file.tell()
    new_data_file.write(new_data)

    # copy the sidedata payload, if any, into the new sidedata file
    sidedata_size = entry[ENTRY_SIDEDATA_COMPRESSED_LENGTH]
    new_sidedata_offset = new_sidedata_file.tell()
    if 0 < sidedata_size:
        old_sidedata_offset = entry[ENTRY_SIDEDATA_OFFSET]
        old_sidedata_file.seek(old_sidedata_offset)
        new_sidedata = old_sidedata_file.read(sidedata_size)
        new_sidedata_file.write(new_sidedata)

    data_uncompressed_length = entry[ENTRY_DATA_UNCOMPRESSED_LENGTH]
    sd_com_mode = entry[ENTRY_SIDEDATA_COMPRESSION_MODE]
    assert data_delta_base <= rev, (data_delta_base, rev)

    # build the index entry with the (possibly relocated) offsets
    new_entry = revlogutils.entry(
        flags=flags,
        data_offset=new_data_offset,
        data_compressed_length=new_data_size,
        data_uncompressed_length=data_uncompressed_length,
        data_delta_base=data_delta_base,
        link_rev=entry[ENTRY_LINK_REV],
        parent_rev_1=entry[ENTRY_PARENT_1],
        parent_rev_2=entry[ENTRY_PARENT_2],
        node_id=entry[ENTRY_NODE_ID],
        sidedata_offset=new_sidedata_offset,
        sidedata_compressed_length=sidedata_size,
        data_compression_mode=d_comp_mode,
        sidedata_compression_mode=sd_com_mode,
    )
    revlog.index.append(new_entry)
    entry_bin = revlog.index.entry_binary(rev)
    new_index_file.write(entry_bin)

    # keep the docket's end-of-file markers in sync with what we wrote
    revlog._docket.index_end = new_index_file.tell()
    revlog._docket.data_end = new_data_file.tell()
    revlog._docket.sidedata_end = new_sidedata_file.tell()
426
426
427
427
def _rewrite_censor(
    revlog,
    old_index,
    all_files,
    rev,
    tombstone,
):
    """rewrite and append a censored revision

    The original payload for ``rev`` is replaced by ``tombstone``; parents
    and linkrev are preserved so the graph shape does not change.
    """
    (
        old_data_file,
        old_sidedata_file,
        new_index_file,
        new_data_file,
        new_sidedata_file,
    ) = all_files
    entry = old_index[rev]

    # XXX consider trying the default compression too
    new_data_size = len(tombstone)
    new_data_offset = new_data_file.tell()
    new_data_file.write(tombstone)

    # we are not adding any sidedata as they might leak info about the censored version

    link_rev = entry[ENTRY_LINK_REV]

    p1 = entry[ENTRY_PARENT_1]
    p2 = entry[ENTRY_PARENT_2]

    new_entry = revlogutils.entry(
        flags=constants.REVIDX_ISCENSORED,
        data_offset=new_data_offset,
        data_compressed_length=new_data_size,
        data_uncompressed_length=new_data_size,
        # delta base pointing at the revision itself marks the tombstone as
        # stored in full (no delta) — presumably required for censored
        # revisions; see the v1 constraints above
        data_delta_base=rev,
        link_rev=link_rev,
        parent_rev_1=p1,
        parent_rev_2=p2,
        node_id=entry[ENTRY_NODE_ID],
        sidedata_offset=0,
        sidedata_compressed_length=0,
        data_compression_mode=COMP_MODE_PLAIN,
        sidedata_compression_mode=COMP_MODE_PLAIN,
    )
    revlog.index.append(new_entry)
    entry_bin = revlog.index.entry_binary(rev)
    new_index_file.write(entry_bin)
    # sidedata_end is untouched: nothing was written to the sidedata file
    revlog._docket.index_end = new_index_file.tell()
    revlog._docket.data_end = new_data_file.tell()
477
477
478
478
479 def _get_filename_from_filelog_index(path):
479 def _get_filename_from_filelog_index(path):
480 # Drop the extension and the `data/` prefix
480 # Drop the extension and the `data/` prefix
481 path_part = path.rsplit(b'.', 1)[0].split(b'/', 1)
481 path_part = path.rsplit(b'.', 1)[0].split(b'/', 1)
482 if len(path_part) < 2:
482 if len(path_part) < 2:
483 msg = _(b"cannot recognize filelog from filename: '%s'")
483 msg = _(b"cannot recognize filelog from filename: '%s'")
484 msg %= path
484 msg %= path
485 raise error.Abort(msg)
485 raise error.Abort(msg)
486
486
487 return path_part[1]
487 return path_part[1]
488
488
489
489
def _filelog_from_filename(repo, path):
    """Return the filelog tracking `path` in `repo`.

    (Same logic as the lookup in `engine.py`.)
    """
    # imported here rather than at module level to avoid an import cycle
    from .. import filelog

    return filelog.filelog(repo.svfs, path)
497
497
498
498
def _write_swapped_parents(repo, rl, rev, offset, fp):
    """Swaps p1 and p2 and overwrites the revlog entry for `rev` in `fp`

    ``offset`` is the byte position of the entry inside ``fp``; the caller
    is responsible for computing it (inline vs. non-inline layouts differ).
    """
    from ..pure import parsers  # avoid cycle

    if repo._currentlock(repo._lockref) is None:
        # Let's be paranoid about it
        msg = "repo needs to be locked to rewrite parents"
        raise error.ProgrammingError(msg)

    index_format = parsers.IndexObject.index_format
    entry = rl.index[rev]
    new_entry = list(entry)
    # fields 5 and 6 of a v1 index entry hold the two parent revisions;
    # swapping them is the whole repair
    new_entry[5], new_entry[6] = entry[6], entry[5]
    # only the first 8 fields are part of the packed on-disk v1 entry
    packed = index_format.pack(*new_entry[:8])
    fp.seek(offset)
    fp.write(packed)
515
515
516
516
def _reorder_filelog_parents(repo, fl, to_fix):
    """
    Swaps p1 and p2 for all `to_fix` revisions of filelog `fl` and writes the
    new version to disk, overwriting the old one with a rename.

    The repair is done on a temporary copy of the index so a failure midway
    leaves the original file untouched; the temporary file is always removed.
    """
    from ..pure import parsers  # avoid cycle

    ui = repo.ui
    assert len(to_fix) > 0
    rl = fl._revlog
    if rl._format_version != constants.REVLOGV1:
        msg = "expected version 1 revlog, got version '%d'" % rl._format_version
        raise error.ProgrammingError(msg)

    index_file = rl._indexfile
    new_file_path = index_file + b'.tmp-parents-fix'
    repaired_msg = _(b"repaired revision %d of 'filelog %s'\n")

    with ui.uninterruptible():
        try:
            # work on a copy so the original survives an interrupted repair
            util.copyfile(
                rl.opener.join(index_file),
                rl.opener.join(new_file_path),
                checkambig=rl._checkambig,
            )

            with rl.opener(new_file_path, mode=b"r+") as fp:
                if rl._inline:
                    # inline revlogs interleave index entries with data, so
                    # each entry's offset must be computed from the parsed
                    # index rather than from a fixed entry size
                    index = parsers.InlinedIndexObject(fp.read())
                    for rev in fl.revs():
                        if rev in to_fix:
                            offset = index._calculate_index(rev)
                            _write_swapped_parents(repo, rl, rev, offset, fp)
                            ui.write(repaired_msg % (rev, index_file))
                else:
                    # non-inline: fixed-size entries, offset is rev * size
                    index_format = parsers.IndexObject.index_format
                    for rev in to_fix:
                        offset = rev * index_format.size
                        _write_swapped_parents(repo, rl, rev, offset, fp)
                        ui.write(repaired_msg % (rev, index_file))

            # atomically swap the repaired index into place, then reload
            rl.opener.rename(new_file_path, index_file)
            rl.clearcaches()
            rl._loadindex()
        finally:
            # remove the temporary file whether or not the rename happened
            util.tryunlink(new_file_path)
563
563
564
564
565 def _is_revision_affected(fl, filerev, metadata_cache=None):
565 def _is_revision_affected(fl, filerev, metadata_cache=None):
566 """Mercurial currently (5.9rc0) uses `p1 == nullrev and p2 != nullrev` as a
566 """Mercurial currently (5.9rc0) uses `p1 == nullrev and p2 != nullrev` as a
567 special meaning compared to the reverse in the context of filelog-based
567 special meaning compared to the reverse in the context of filelog-based
568 copytracing. issue6528 exists because new code assumed that parent ordering
568 copytracing. issue6528 exists because new code assumed that parent ordering
569 didn't matter, so this detects if the revision contains metadata (since
569 didn't matter, so this detects if the revision contains metadata (since
570 it's only used for filelog-based copytracing) and its parents are in the
570 it's only used for filelog-based copytracing) and its parents are in the
571 "wrong" order."""
571 "wrong" order."""
572 try:
572 try:
573 raw_text = fl.rawdata(filerev)
573 raw_text = fl.rawdata(filerev)
574 except error.CensoredNodeError:
574 except error.CensoredNodeError:
575 # We don't care about censored nodes as they never carry metadata
575 # We don't care about censored nodes as they never carry metadata
576 return False
576 return False
577 has_meta = raw_text.startswith(b'\x01\n')
577 has_meta = raw_text.startswith(b'\x01\n')
578 if metadata_cache is not None:
578 if metadata_cache is not None:
579 metadata_cache[filerev] = has_meta
579 metadata_cache[filerev] = has_meta
580 if has_meta:
580 if has_meta:
581 (p1, p2) = fl.parentrevs(filerev)
581 (p1, p2) = fl.parentrevs(filerev)
582 if p1 != nullrev and p2 == nullrev:
582 if p1 != nullrev and p2 == nullrev:
583 return True
583 return True
584 return False
584 return False
585
585
586
586
587 def _is_revision_affected_fast(repo, fl, filerev, metadata_cache):
587 def _is_revision_affected_fast(repo, fl, filerev, metadata_cache):
588 """Optimization fast-path for `_is_revision_affected`.
588 """Optimization fast-path for `_is_revision_affected`.
589
589
590 `metadata_cache` is a dict of `{rev: has_metadata}` which allows any
590 `metadata_cache` is a dict of `{rev: has_metadata}` which allows any
591 revision to check if its base has metadata, saving computation of the full
591 revision to check if its base has metadata, saving computation of the full
592 text, instead looking at the current delta.
592 text, instead looking at the current delta.
593
593
594 This optimization only works if the revisions are looked at in order."""
594 This optimization only works if the revisions are looked at in order."""
595 rl = fl._revlog
595 rl = fl._revlog
596
596
597 if rl.iscensored(filerev):
597 if rl.iscensored(filerev):
598 # Censored revisions don't contain metadata, so they cannot be affected
598 # Censored revisions don't contain metadata, so they cannot be affected
599 metadata_cache[filerev] = False
599 metadata_cache[filerev] = False
600 return False
600 return False
601
601
602 p1, p2 = rl.parentrevs(filerev)
602 p1, p2 = rl.parentrevs(filerev)
603 if p1 == nullrev or p2 != nullrev:
603 if p1 == nullrev or p2 != nullrev:
604 return False
604 return False
605
605
606 delta_parent = rl.deltaparent(filerev)
606 delta_parent = rl.deltaparent(filerev)
607 parent_has_metadata = metadata_cache.get(delta_parent)
607 parent_has_metadata = metadata_cache.get(delta_parent)
608 if parent_has_metadata is None:
608 if parent_has_metadata is None:
609 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
609 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
610 return is_affected
610 return is_affected
611
611
612 chunk = rl._chunk(filerev)
612 chunk = rl._chunk(filerev)
613 if not len(chunk):
613 if not len(chunk):
614 # No diff for this revision
614 # No diff for this revision
615 return parent_has_metadata
615 return parent_has_metadata
616
616
617 header_length = 12
617 header_length = 12
618 if len(chunk) < header_length:
618 if len(chunk) < header_length:
619 raise error.Abort(_(b"patch cannot be decoded"))
619 raise error.Abort(_(b"patch cannot be decoded"))
620
620
621 start, _end, _length = struct.unpack(b">lll", chunk[:header_length])
621 start, _end, _length = struct.unpack(b">lll", chunk[:header_length])
622
622
623 if start < 2: # len(b'\x01\n') == 2
623 if start < 2: # len(b'\x01\n') == 2
624 # This delta does *something* to the metadata marker (if any).
624 # This delta does *something* to the metadata marker (if any).
625 # Check it the slow way
625 # Check it the slow way
626 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
626 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
627 return is_affected
627 return is_affected
628
628
629 # The diff did not remove or add the metadata header, it's then in the same
629 # The diff did not remove or add the metadata header, it's then in the same
630 # situation as its parent
630 # situation as its parent
631 metadata_cache[filerev] = parent_has_metadata
631 metadata_cache[filerev] = parent_has_metadata
632 return parent_has_metadata
632 return parent_has_metadata
633
633
634
634
635 def _from_report(ui, repo, context, from_report, dry_run):
635 def _from_report(ui, repo, context, from_report, dry_run):
636 """
636 """
637 Fix the revisions given in the `from_report` file, but still checks if the
637 Fix the revisions given in the `from_report` file, but still checks if the
638 revisions are indeed affected to prevent an unfortunate cyclic situation
638 revisions are indeed affected to prevent an unfortunate cyclic situation
639 where we'd swap well-ordered parents again.
639 where we'd swap well-ordered parents again.
640
640
641 See the doc for `debug_fix_issue6528` for the format documentation.
641 See the doc for `debug_fix_issue6528` for the format documentation.
642 """
642 """
643 ui.write(_(b"loading report file '%s'\n") % from_report)
643 ui.write(_(b"loading report file '%s'\n") % from_report)
644
644
645 with context(), open(from_report, mode='rb') as f:
645 with context(), open(from_report, mode='rb') as f:
646 for line in f.read().split(b'\n'):
646 for line in f.read().split(b'\n'):
647 if not line:
647 if not line:
648 continue
648 continue
649 filenodes, filename = line.split(b' ', 1)
649 filenodes, filename = line.split(b' ', 1)
650 fl = _filelog_from_filename(repo, filename)
650 fl = _filelog_from_filename(repo, filename)
651 to_fix = set(
651 to_fix = set(
652 fl.rev(binascii.unhexlify(n)) for n in filenodes.split(b',')
652 fl.rev(binascii.unhexlify(n)) for n in filenodes.split(b',')
653 )
653 )
654 excluded = set()
654 excluded = set()
655
655
656 for filerev in to_fix:
656 for filerev in to_fix:
657 if _is_revision_affected(fl, filerev):
657 if _is_revision_affected(fl, filerev):
658 msg = b"found affected revision %d for filelog '%s'\n"
658 msg = b"found affected revision %d for filelog '%s'\n"
659 ui.warn(msg % (filerev, filename))
659 ui.warn(msg % (filerev, filename))
660 else:
660 else:
661 msg = _(b"revision %s of file '%s' is not affected\n")
661 msg = _(b"revision %s of file '%s' is not affected\n")
662 msg %= (binascii.hexlify(fl.node(filerev)), filename)
662 msg %= (binascii.hexlify(fl.node(filerev)), filename)
663 ui.warn(msg)
663 ui.warn(msg)
664 excluded.add(filerev)
664 excluded.add(filerev)
665
665
666 to_fix = to_fix - excluded
666 to_fix = to_fix - excluded
667 if not to_fix:
667 if not to_fix:
668 msg = _(b"no affected revisions were found for '%s'\n")
668 msg = _(b"no affected revisions were found for '%s'\n")
669 ui.write(msg % filename)
669 ui.write(msg % filename)
670 continue
670 continue
671 if not dry_run:
671 if not dry_run:
672 _reorder_filelog_parents(repo, fl, sorted(to_fix))
672 _reorder_filelog_parents(repo, fl, sorted(to_fix))
673
673
674
674
675 def repair_issue6528(ui, repo, dry_run=False, to_report=None, from_report=None):
675 def repair_issue6528(
676 ui, repo, dry_run=False, to_report=None, from_report=None, paranoid=False
677 ):
676 from .. import store # avoid cycle
678 from .. import store # avoid cycle
677
679
678 @contextlib.contextmanager
680 @contextlib.contextmanager
679 def context():
681 def context():
680 if dry_run or to_report: # No need for locking
682 if dry_run or to_report: # No need for locking
681 yield
683 yield
682 else:
684 else:
683 with repo.wlock(), repo.lock():
685 with repo.wlock(), repo.lock():
684 yield
686 yield
685
687
686 if from_report:
688 if from_report:
687 return _from_report(ui, repo, context, from_report, dry_run)
689 return _from_report(ui, repo, context, from_report, dry_run)
688
690
689 report_entries = []
691 report_entries = []
690
692
691 with context():
693 with context():
692 files = list(
694 files = list(
693 (file_type, path)
695 (file_type, path)
694 for (file_type, path, _e, _s) in repo.store.datafiles()
696 for (file_type, path, _e, _s) in repo.store.datafiles()
695 if path.endswith(b'.i') and file_type & store.FILEFLAGS_FILELOG
697 if path.endswith(b'.i') and file_type & store.FILEFLAGS_FILELOG
696 )
698 )
697
699
698 progress = ui.makeprogress(
700 progress = ui.makeprogress(
699 _(b"looking for affected revisions"),
701 _(b"looking for affected revisions"),
700 unit=_(b"filelogs"),
702 unit=_(b"filelogs"),
701 total=len(files),
703 total=len(files),
702 )
704 )
703 found_nothing = True
705 found_nothing = True
704
706
705 for file_type, path in files:
707 for file_type, path in files:
706 if (
708 if (
707 not path.endswith(b'.i')
709 not path.endswith(b'.i')
708 or not file_type & store.FILEFLAGS_FILELOG
710 or not file_type & store.FILEFLAGS_FILELOG
709 ):
711 ):
710 continue
712 continue
711 progress.increment()
713 progress.increment()
712 filename = _get_filename_from_filelog_index(path)
714 filename = _get_filename_from_filelog_index(path)
713 fl = _filelog_from_filename(repo, filename)
715 fl = _filelog_from_filename(repo, filename)
714
716
715 # Set of filerevs (or hex filenodes if `to_report`) that need fixing
717 # Set of filerevs (or hex filenodes if `to_report`) that need fixing
716 to_fix = set()
718 to_fix = set()
717 metadata_cache = {}
719 metadata_cache = {}
718 for filerev in fl.revs():
720 for filerev in fl.revs():
719 affected = _is_revision_affected_fast(
721 affected = _is_revision_affected_fast(
720 repo, fl, filerev, metadata_cache
722 repo, fl, filerev, metadata_cache
721 )
723 )
724 if paranoid:
725 slow = _is_revision_affected(fl, filerev)
726 if slow != affected:
727 msg = _(b"paranoid check failed for '%s' at node %s")
728 node = binascii.hexlify(fl.node(filerev))
729 raise error.Abort(msg % (filename, node))
722 if affected:
730 if affected:
723 msg = b"found affected revision %d for filelog '%s'\n"
731 msg = b"found affected revision %d for filelog '%s'\n"
724 ui.warn(msg % (filerev, path))
732 ui.warn(msg % (filerev, path))
725 found_nothing = False
733 found_nothing = False
726 if not dry_run:
734 if not dry_run:
727 if to_report:
735 if to_report:
728 to_fix.add(binascii.hexlify(fl.node(filerev)))
736 to_fix.add(binascii.hexlify(fl.node(filerev)))
729 else:
737 else:
730 to_fix.add(filerev)
738 to_fix.add(filerev)
731
739
732 if to_fix:
740 if to_fix:
733 to_fix = sorted(to_fix)
741 to_fix = sorted(to_fix)
734 if to_report:
742 if to_report:
735 report_entries.append((filename, to_fix))
743 report_entries.append((filename, to_fix))
736 else:
744 else:
737 _reorder_filelog_parents(repo, fl, to_fix)
745 _reorder_filelog_parents(repo, fl, to_fix)
738
746
739 if found_nothing:
747 if found_nothing:
740 ui.write(_(b"no affected revisions were found\n"))
748 ui.write(_(b"no affected revisions were found\n"))
741
749
742 if to_report and report_entries:
750 if to_report and report_entries:
743 with open(to_report, mode="wb") as f:
751 with open(to_report, mode="wb") as f:
744 for path, to_fix in report_entries:
752 for path, to_fix in report_entries:
745 f.write(b"%s %s\n" % (b",".join(to_fix), path))
753 f.write(b"%s %s\n" % (b",".join(to_fix), path))
746
754
747 progress.complete()
755 progress.complete()
@@ -1,447 +1,447
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 abort
3 abort
4 add
4 add
5 addremove
5 addremove
6 annotate
6 annotate
7 archive
7 archive
8 backout
8 backout
9 bisect
9 bisect
10 bookmarks
10 bookmarks
11 branch
11 branch
12 branches
12 branches
13 bundle
13 bundle
14 cat
14 cat
15 clone
15 clone
16 commit
16 commit
17 config
17 config
18 continue
18 continue
19 copy
19 copy
20 diff
20 diff
21 export
21 export
22 files
22 files
23 forget
23 forget
24 graft
24 graft
25 grep
25 grep
26 heads
26 heads
27 help
27 help
28 identify
28 identify
29 import
29 import
30 incoming
30 incoming
31 init
31 init
32 locate
32 locate
33 log
33 log
34 manifest
34 manifest
35 merge
35 merge
36 outgoing
36 outgoing
37 parents
37 parents
38 paths
38 paths
39 phase
39 phase
40 pull
40 pull
41 purge
41 purge
42 push
42 push
43 recover
43 recover
44 remove
44 remove
45 rename
45 rename
46 resolve
46 resolve
47 revert
47 revert
48 rollback
48 rollback
49 root
49 root
50 serve
50 serve
51 shelve
51 shelve
52 status
52 status
53 summary
53 summary
54 tag
54 tag
55 tags
55 tags
56 tip
56 tip
57 unbundle
57 unbundle
58 unshelve
58 unshelve
59 update
59 update
60 verify
60 verify
61 version
61 version
62
62
63 Show all commands that start with "a"
63 Show all commands that start with "a"
64 $ hg debugcomplete a
64 $ hg debugcomplete a
65 abort
65 abort
66 add
66 add
67 addremove
67 addremove
68 annotate
68 annotate
69 archive
69 archive
70
70
71 Do not show debug commands if there are other candidates
71 Do not show debug commands if there are other candidates
72 $ hg debugcomplete d
72 $ hg debugcomplete d
73 diff
73 diff
74
74
75 Show debug commands if there are no other candidates
75 Show debug commands if there are no other candidates
76 $ hg debugcomplete debug
76 $ hg debugcomplete debug
77 debug-repair-issue6528
77 debug-repair-issue6528
78 debugancestor
78 debugancestor
79 debugantivirusrunning
79 debugantivirusrunning
80 debugapplystreamclonebundle
80 debugapplystreamclonebundle
81 debugbackupbundle
81 debugbackupbundle
82 debugbuilddag
82 debugbuilddag
83 debugbundle
83 debugbundle
84 debugcapabilities
84 debugcapabilities
85 debugchangedfiles
85 debugchangedfiles
86 debugcheckstate
86 debugcheckstate
87 debugcolor
87 debugcolor
88 debugcommands
88 debugcommands
89 debugcomplete
89 debugcomplete
90 debugconfig
90 debugconfig
91 debugcreatestreamclonebundle
91 debugcreatestreamclonebundle
92 debugdag
92 debugdag
93 debugdata
93 debugdata
94 debugdate
94 debugdate
95 debugdeltachain
95 debugdeltachain
96 debugdirstate
96 debugdirstate
97 debugdirstateignorepatternshash
97 debugdirstateignorepatternshash
98 debugdiscovery
98 debugdiscovery
99 debugdownload
99 debugdownload
100 debugextensions
100 debugextensions
101 debugfileset
101 debugfileset
102 debugformat
102 debugformat
103 debugfsinfo
103 debugfsinfo
104 debuggetbundle
104 debuggetbundle
105 debugignore
105 debugignore
106 debugindex
106 debugindex
107 debugindexdot
107 debugindexdot
108 debugindexstats
108 debugindexstats
109 debuginstall
109 debuginstall
110 debugknown
110 debugknown
111 debuglabelcomplete
111 debuglabelcomplete
112 debuglocks
112 debuglocks
113 debugmanifestfulltextcache
113 debugmanifestfulltextcache
114 debugmergestate
114 debugmergestate
115 debugnamecomplete
115 debugnamecomplete
116 debugnodemap
116 debugnodemap
117 debugobsolete
117 debugobsolete
118 debugp1copies
118 debugp1copies
119 debugp2copies
119 debugp2copies
120 debugpathcomplete
120 debugpathcomplete
121 debugpathcopies
121 debugpathcopies
122 debugpeer
122 debugpeer
123 debugpickmergetool
123 debugpickmergetool
124 debugpushkey
124 debugpushkey
125 debugpvec
125 debugpvec
126 debugrebuilddirstate
126 debugrebuilddirstate
127 debugrebuildfncache
127 debugrebuildfncache
128 debugrename
128 debugrename
129 debugrequires
129 debugrequires
130 debugrevlog
130 debugrevlog
131 debugrevlogindex
131 debugrevlogindex
132 debugrevspec
132 debugrevspec
133 debugserve
133 debugserve
134 debugsetparents
134 debugsetparents
135 debugshell
135 debugshell
136 debugsidedata
136 debugsidedata
137 debugssl
137 debugssl
138 debugstrip
138 debugstrip
139 debugsub
139 debugsub
140 debugsuccessorssets
140 debugsuccessorssets
141 debugtagscache
141 debugtagscache
142 debugtemplate
142 debugtemplate
143 debuguigetpass
143 debuguigetpass
144 debuguiprompt
144 debuguiprompt
145 debugupdatecaches
145 debugupdatecaches
146 debugupgraderepo
146 debugupgraderepo
147 debugwalk
147 debugwalk
148 debugwhyunstable
148 debugwhyunstable
149 debugwireargs
149 debugwireargs
150 debugwireproto
150 debugwireproto
151
151
152 Do not show the alias of a debug command if there are other candidates
152 Do not show the alias of a debug command if there are other candidates
153 (this should hide rawcommit)
153 (this should hide rawcommit)
154 $ hg debugcomplete r
154 $ hg debugcomplete r
155 recover
155 recover
156 remove
156 remove
157 rename
157 rename
158 resolve
158 resolve
159 revert
159 revert
160 rollback
160 rollback
161 root
161 root
162 Show the alias of a debug command if there are no other candidates
162 Show the alias of a debug command if there are no other candidates
163 $ hg debugcomplete rawc
163 $ hg debugcomplete rawc
164
164
165
165
166 Show the global options
166 Show the global options
167 $ hg debugcomplete --options | sort
167 $ hg debugcomplete --options | sort
168 --color
168 --color
169 --config
169 --config
170 --cwd
170 --cwd
171 --debug
171 --debug
172 --debugger
172 --debugger
173 --encoding
173 --encoding
174 --encodingmode
174 --encodingmode
175 --help
175 --help
176 --hidden
176 --hidden
177 --noninteractive
177 --noninteractive
178 --pager
178 --pager
179 --profile
179 --profile
180 --quiet
180 --quiet
181 --repository
181 --repository
182 --time
182 --time
183 --traceback
183 --traceback
184 --verbose
184 --verbose
185 --version
185 --version
186 -R
186 -R
187 -h
187 -h
188 -q
188 -q
189 -v
189 -v
190 -y
190 -y
191
191
192 Show the options for the "serve" command
192 Show the options for the "serve" command
193 $ hg debugcomplete --options serve | sort
193 $ hg debugcomplete --options serve | sort
194 --accesslog
194 --accesslog
195 --address
195 --address
196 --certificate
196 --certificate
197 --cmdserver
197 --cmdserver
198 --color
198 --color
199 --config
199 --config
200 --cwd
200 --cwd
201 --daemon
201 --daemon
202 --daemon-postexec
202 --daemon-postexec
203 --debug
203 --debug
204 --debugger
204 --debugger
205 --encoding
205 --encoding
206 --encodingmode
206 --encodingmode
207 --errorlog
207 --errorlog
208 --help
208 --help
209 --hidden
209 --hidden
210 --ipv6
210 --ipv6
211 --name
211 --name
212 --noninteractive
212 --noninteractive
213 --pager
213 --pager
214 --pid-file
214 --pid-file
215 --port
215 --port
216 --prefix
216 --prefix
217 --print-url
217 --print-url
218 --profile
218 --profile
219 --quiet
219 --quiet
220 --repository
220 --repository
221 --stdio
221 --stdio
222 --style
222 --style
223 --subrepos
223 --subrepos
224 --templates
224 --templates
225 --time
225 --time
226 --traceback
226 --traceback
227 --verbose
227 --verbose
228 --version
228 --version
229 --web-conf
229 --web-conf
230 -6
230 -6
231 -A
231 -A
232 -E
232 -E
233 -R
233 -R
234 -S
234 -S
235 -a
235 -a
236 -d
236 -d
237 -h
237 -h
238 -n
238 -n
239 -p
239 -p
240 -q
240 -q
241 -t
241 -t
242 -v
242 -v
243 -y
243 -y
244
244
245 Show an error if we use --options with an ambiguous abbreviation
245 Show an error if we use --options with an ambiguous abbreviation
246 $ hg debugcomplete --options s
246 $ hg debugcomplete --options s
247 hg: command 's' is ambiguous:
247 hg: command 's' is ambiguous:
248 serve shelve showconfig status summary
248 serve shelve showconfig status summary
249 [10]
249 [10]
250
250
251 Show all commands + options
251 Show all commands + options
252 $ hg debugcommands
252 $ hg debugcommands
253 abort: dry-run
253 abort: dry-run
254 add: include, exclude, subrepos, dry-run
254 add: include, exclude, subrepos, dry-run
255 addremove: similarity, subrepos, include, exclude, dry-run
255 addremove: similarity, subrepos, include, exclude, dry-run
256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 bisect: reset, good, bad, skip, extend, command, noupdate
259 bisect: reset, good, bad, skip, extend, command, noupdate
260 bookmarks: force, rev, delete, rename, inactive, list, template
260 bookmarks: force, rev, delete, rename, inactive, list, template
261 branch: force, clean, rev
261 branch: force, clean, rev
262 branches: active, closed, rev, template
262 branches: active, closed, rev, template
263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 cat: output, rev, decode, include, exclude, template
264 cat: output, rev, decode, include, exclude, template
265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 continue: dry-run
268 continue: dry-run
269 copy: forget, after, at-rev, force, include, exclude, dry-run
269 copy: forget, after, at-rev, force, include, exclude, dry-run
270 debug-repair-issue6528: to-report, from-report, dry-run
270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
271 debugancestor:
271 debugancestor:
272 debugantivirusrunning:
272 debugantivirusrunning:
273 debugapplystreamclonebundle:
273 debugapplystreamclonebundle:
274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
275 debugbuilddag: mergeable-file, overwritten-file, new-file
275 debugbuilddag: mergeable-file, overwritten-file, new-file
276 debugbundle: all, part-type, spec
276 debugbundle: all, part-type, spec
277 debugcapabilities:
277 debugcapabilities:
278 debugchangedfiles: compute
278 debugchangedfiles: compute
279 debugcheckstate:
279 debugcheckstate:
280 debugcolor: style
280 debugcolor: style
281 debugcommands:
281 debugcommands:
282 debugcomplete: options
282 debugcomplete: options
283 debugcreatestreamclonebundle:
283 debugcreatestreamclonebundle:
284 debugdag: tags, branches, dots, spaces
284 debugdag: tags, branches, dots, spaces
285 debugdata: changelog, manifest, dir
285 debugdata: changelog, manifest, dir
286 debugdate: extended
286 debugdate: extended
287 debugdeltachain: changelog, manifest, dir, template
287 debugdeltachain: changelog, manifest, dir, template
288 debugdirstateignorepatternshash:
288 debugdirstateignorepatternshash:
289 debugdirstate: nodates, dates, datesort, all
289 debugdirstate: nodates, dates, datesort, all
290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
291 debugdownload: output
291 debugdownload: output
292 debugextensions: template
292 debugextensions: template
293 debugfileset: rev, all-files, show-matcher, show-stage
293 debugfileset: rev, all-files, show-matcher, show-stage
294 debugformat: template
294 debugformat: template
295 debugfsinfo:
295 debugfsinfo:
296 debuggetbundle: head, common, type
296 debuggetbundle: head, common, type
297 debugignore:
297 debugignore:
298 debugindex: changelog, manifest, dir, template
298 debugindex: changelog, manifest, dir, template
299 debugindexdot: changelog, manifest, dir
299 debugindexdot: changelog, manifest, dir
300 debugindexstats:
300 debugindexstats:
301 debuginstall: template
301 debuginstall: template
302 debugknown:
302 debugknown:
303 debuglabelcomplete:
303 debuglabelcomplete:
304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
305 debugmanifestfulltextcache: clear, add
305 debugmanifestfulltextcache: clear, add
306 debugmergestate: style, template
306 debugmergestate: style, template
307 debugnamecomplete:
307 debugnamecomplete:
308 debugnodemap: dump-new, dump-disk, check, metadata
308 debugnodemap: dump-new, dump-disk, check, metadata
309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
310 debugp1copies: rev
310 debugp1copies: rev
311 debugp2copies: rev
311 debugp2copies: rev
312 debugpathcomplete: full, normal, added, removed
312 debugpathcomplete: full, normal, added, removed
313 debugpathcopies: include, exclude
313 debugpathcopies: include, exclude
314 debugpeer:
314 debugpeer:
315 debugpickmergetool: rev, changedelete, include, exclude, tool
315 debugpickmergetool: rev, changedelete, include, exclude, tool
316 debugpushkey:
316 debugpushkey:
317 debugpvec:
317 debugpvec:
318 debugrebuilddirstate: rev, minimal
318 debugrebuilddirstate: rev, minimal
319 debugrebuildfncache:
319 debugrebuildfncache:
320 debugrename: rev
320 debugrename: rev
321 debugrequires:
321 debugrequires:
322 debugrevlog: changelog, manifest, dir, dump
322 debugrevlog: changelog, manifest, dir, dump
323 debugrevlogindex: changelog, manifest, dir, format
323 debugrevlogindex: changelog, manifest, dir, format
324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
325 debugserve: sshstdio, logiofd, logiofile
325 debugserve: sshstdio, logiofd, logiofile
326 debugsetparents:
326 debugsetparents:
327 debugshell:
327 debugshell:
328 debugsidedata: changelog, manifest, dir
328 debugsidedata: changelog, manifest, dir
329 debugssl:
329 debugssl:
330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
331 debugsub: rev
331 debugsub: rev
332 debugsuccessorssets: closest
332 debugsuccessorssets: closest
333 debugtagscache:
333 debugtagscache:
334 debugtemplate: rev, define
334 debugtemplate: rev, define
335 debuguigetpass: prompt
335 debuguigetpass: prompt
336 debuguiprompt: prompt
336 debuguiprompt: prompt
337 debugupdatecaches:
337 debugupdatecaches:
338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
339 debugwalk: include, exclude
339 debugwalk: include, exclude
340 debugwhyunstable:
340 debugwhyunstable:
341 debugwireargs: three, four, five, ssh, remotecmd, insecure
341 debugwireargs: three, four, five, ssh, remotecmd, insecure
342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
345 files: rev, print0, include, exclude, template, subrepos
345 files: rev, print0, include, exclude, template, subrepos
346 forget: interactive, include, exclude, dry-run
346 forget: interactive, include, exclude, dry-run
347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
349 heads: rev, topo, active, closed, style, template
349 heads: rev, topo, active, closed, style, template
350 help: extension, command, keyword, system
350 help: extension, command, keyword, system
351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
354 init: ssh, remotecmd, insecure
354 init: ssh, remotecmd, insecure
355 locate: rev, print0, fullpath, include, exclude
355 locate: rev, print0, fullpath, include, exclude
356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
357 manifest: rev, all, template
357 manifest: rev, all, template
358 merge: force, rev, preview, abort, tool
358 merge: force, rev, preview, abort, tool
359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 parents: rev, style, template
360 parents: rev, style, template
361 paths: template
361 paths: template
362 phase: public, draft, secret, force, rev
362 phase: public, draft, secret, force, rev
363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
366 recover: verify
366 recover: verify
367 remove: after, force, subrepos, include, exclude, dry-run
367 remove: after, force, subrepos, include, exclude, dry-run
368 rename: forget, after, at-rev, force, include, exclude, dry-run
368 rename: forget, after, at-rev, force, include, exclude, dry-run
369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
371 rollback: dry-run, force
371 rollback: dry-run, force
372 root: template
372 root: template
373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
376 summary: remote
376 summary: remote
377 tag: force, local, rev, remove, edit, message, date, user
377 tag: force, local, rev, remove, edit, message, date, user
378 tags: template
378 tags: template
379 tip: patch, git, style, template
379 tip: patch, git, style, template
380 unbundle: update
380 unbundle: update
381 unshelve: abort, continue, interactive, keep, name, tool, date
381 unshelve: abort, continue, interactive, keep, name, tool, date
382 update: clean, check, merge, date, rev, tool
382 update: clean, check, merge, date, rev, tool
383 verify: full
383 verify: full
384 version: template
384 version: template
385
385
386 $ hg init a
386 $ hg init a
387 $ cd a
387 $ cd a
388 $ echo fee > fee
388 $ echo fee > fee
389 $ hg ci -q -Amfee
389 $ hg ci -q -Amfee
390 $ hg tag fee
390 $ hg tag fee
391 $ mkdir fie
391 $ mkdir fie
392 $ echo dead > fie/dead
392 $ echo dead > fie/dead
393 $ echo live > fie/live
393 $ echo live > fie/live
394 $ hg bookmark fo
394 $ hg bookmark fo
395 $ hg branch -q fie
395 $ hg branch -q fie
396 $ hg ci -q -Amfie
396 $ hg ci -q -Amfie
397 $ echo fo > fo
397 $ echo fo > fo
398 $ hg branch -qf default
398 $ hg branch -qf default
399 $ hg ci -q -Amfo
399 $ hg ci -q -Amfo
400 $ echo Fum > Fum
400 $ echo Fum > Fum
401 $ hg ci -q -AmFum
401 $ hg ci -q -AmFum
402 $ hg bookmark Fum
402 $ hg bookmark Fum
403
403
404 Test debugpathcomplete
404 Test debugpathcomplete
405
405
406 $ hg debugpathcomplete f
406 $ hg debugpathcomplete f
407 fee
407 fee
408 fie
408 fie
409 fo
409 fo
410 $ hg debugpathcomplete -f f
410 $ hg debugpathcomplete -f f
411 fee
411 fee
412 fie/dead
412 fie/dead
413 fie/live
413 fie/live
414 fo
414 fo
415
415
416 $ hg rm Fum
416 $ hg rm Fum
417 $ hg debugpathcomplete -r F
417 $ hg debugpathcomplete -r F
418 Fum
418 Fum
419
419
420 Test debugnamecomplete
420 Test debugnamecomplete
421
421
422 $ hg debugnamecomplete
422 $ hg debugnamecomplete
423 Fum
423 Fum
424 default
424 default
425 fee
425 fee
426 fie
426 fie
427 fo
427 fo
428 tip
428 tip
429 $ hg debugnamecomplete f
429 $ hg debugnamecomplete f
430 fee
430 fee
431 fie
431 fie
432 fo
432 fo
433
433
434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
435 used for completions in some shells.
435 used for completions in some shells.
436
436
437 $ hg debuglabelcomplete
437 $ hg debuglabelcomplete
438 Fum
438 Fum
439 default
439 default
440 fee
440 fee
441 fie
441 fie
442 fo
442 fo
443 tip
443 tip
444 $ hg debuglabelcomplete f
444 $ hg debuglabelcomplete f
445 fee
445 fee
446 fie
446 fie
447 fo
447 fo
@@ -1,414 +1,433
1 ===============================================================
1 ===============================================================
2 Test non-regression on the corruption associated with issue6528
2 Test non-regression on the corruption associated with issue6528
3 ===============================================================
3 ===============================================================
4
4
5 Setup
5 Setup
6 =====
6 =====
7
7
8 $ hg init base-repo
8 $ hg init base-repo
9 $ cd base-repo
9 $ cd base-repo
10
10
11 $ cat <<EOF > a.txt
11 $ cat <<EOF > a.txt
12 > 1
12 > 1
13 > 2
13 > 2
14 > 3
14 > 3
15 > 4
15 > 4
16 > 5
16 > 5
17 > 6
17 > 6
18 > EOF
18 > EOF
19
19
20 $ hg add a.txt
20 $ hg add a.txt
21 $ hg commit -m 'c_base_c - create a.txt'
21 $ hg commit -m 'c_base_c - create a.txt'
22
22
23 Modify a.txt
23 Modify a.txt
24
24
25 $ sed -e 's/1/foo/' a.txt > a.tmp; mv a.tmp a.txt
25 $ sed -e 's/1/foo/' a.txt > a.tmp; mv a.tmp a.txt
26 $ hg commit -m 'c_modify_c - modify a.txt'
26 $ hg commit -m 'c_modify_c - modify a.txt'
27
27
28 Modify and rename a.txt to b.txt
28 Modify and rename a.txt to b.txt
29
29
30 $ hg up -r "desc('c_base_c')"
30 $ hg up -r "desc('c_base_c')"
31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
32 $ sed -e 's/6/bar/' a.txt > a.tmp; mv a.tmp a.txt
32 $ sed -e 's/6/bar/' a.txt > a.tmp; mv a.tmp a.txt
33 $ hg mv a.txt b.txt
33 $ hg mv a.txt b.txt
34 $ hg commit -m 'c_rename_c - rename and modify a.txt to b.txt'
34 $ hg commit -m 'c_rename_c - rename and modify a.txt to b.txt'
35 created new head
35 created new head
36
36
37 Merge each branch
37 Merge each branch
38
38
39 $ hg merge -r "desc('c_modify_c')"
39 $ hg merge -r "desc('c_modify_c')"
40 merging b.txt and a.txt to b.txt
40 merging b.txt and a.txt to b.txt
41 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
41 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
42 (branch merge, don't forget to commit)
42 (branch merge, don't forget to commit)
43 $ hg commit -m 'c_merge_c: commit merge'
43 $ hg commit -m 'c_merge_c: commit merge'
44
44
45 $ hg debugrevlogindex b.txt
45 $ hg debugrevlogindex b.txt
46 rev linkrev nodeid p1 p2
46 rev linkrev nodeid p1 p2
47 0 2 05b806ebe5ea 000000000000 000000000000
47 0 2 05b806ebe5ea 000000000000 000000000000
48 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
48 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
49
49
50 Check commit Graph
50 Check commit Graph
51
51
52 $ hg log -G
52 $ hg log -G
53 @ changeset: 3:a1cc2bdca0aa
53 @ changeset: 3:a1cc2bdca0aa
54 |\ tag: tip
54 |\ tag: tip
55 | | parent: 2:615c6ccefd15
55 | | parent: 2:615c6ccefd15
56 | | parent: 1:373d507f4667
56 | | parent: 1:373d507f4667
57 | | user: test
57 | | user: test
58 | | date: Thu Jan 01 00:00:00 1970 +0000
58 | | date: Thu Jan 01 00:00:00 1970 +0000
59 | | summary: c_merge_c: commit merge
59 | | summary: c_merge_c: commit merge
60 | |
60 | |
61 | o changeset: 2:615c6ccefd15
61 | o changeset: 2:615c6ccefd15
62 | | parent: 0:f5a5a568022f
62 | | parent: 0:f5a5a568022f
63 | | user: test
63 | | user: test
64 | | date: Thu Jan 01 00:00:00 1970 +0000
64 | | date: Thu Jan 01 00:00:00 1970 +0000
65 | | summary: c_rename_c - rename and modify a.txt to b.txt
65 | | summary: c_rename_c - rename and modify a.txt to b.txt
66 | |
66 | |
67 o | changeset: 1:373d507f4667
67 o | changeset: 1:373d507f4667
68 |/ user: test
68 |/ user: test
69 | date: Thu Jan 01 00:00:00 1970 +0000
69 | date: Thu Jan 01 00:00:00 1970 +0000
70 | summary: c_modify_c - modify a.txt
70 | summary: c_modify_c - modify a.txt
71 |
71 |
72 o changeset: 0:f5a5a568022f
72 o changeset: 0:f5a5a568022f
73 user: test
73 user: test
74 date: Thu Jan 01 00:00:00 1970 +0000
74 date: Thu Jan 01 00:00:00 1970 +0000
75 summary: c_base_c - create a.txt
75 summary: c_base_c - create a.txt
76
76
77
77
78 $ hg cat -r . b.txt
78 $ hg cat -r . b.txt
79 foo
79 foo
80 2
80 2
81 3
81 3
82 4
82 4
83 5
83 5
84 bar
84 bar
85 $ cat b.txt
85 $ cat b.txt
86 foo
86 foo
87 2
87 2
88 3
88 3
89 4
89 4
90 5
90 5
91 bar
91 bar
92 $ cd ..
92 $ cd ..
93
93
94
94
95 Check the lack of corruption
95 Check the lack of corruption
96 ============================
96 ============================
97
97
98 $ hg clone --pull base-repo cloned
98 $ hg clone --pull base-repo cloned
99 requesting all changes
99 requesting all changes
100 adding changesets
100 adding changesets
101 adding manifests
101 adding manifests
102 adding file changes
102 adding file changes
103 added 4 changesets with 4 changes to 2 files
103 added 4 changesets with 4 changes to 2 files
104 new changesets f5a5a568022f:a1cc2bdca0aa
104 new changesets f5a5a568022f:a1cc2bdca0aa
105 updating to branch default
105 updating to branch default
106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 $ cd cloned
107 $ cd cloned
108 $ hg up -r "desc('c_merge_c')"
108 $ hg up -r "desc('c_merge_c')"
109 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
109 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
110
110
111
111
112 Status is buggy, even with debugrebuilddirstate
112 Status is buggy, even with debugrebuilddirstate
113
113
114 $ hg cat -r . b.txt
114 $ hg cat -r . b.txt
115 foo
115 foo
116 2
116 2
117 3
117 3
118 4
118 4
119 5
119 5
120 bar
120 bar
121 $ cat b.txt
121 $ cat b.txt
122 foo
122 foo
123 2
123 2
124 3
124 3
125 4
125 4
126 5
126 5
127 bar
127 bar
128 $ hg status
128 $ hg status
129 $ hg debugrebuilddirstate
129 $ hg debugrebuilddirstate
130 $ hg status
130 $ hg status
131
131
132 the history was altered
132 the history was altered
133
133
134 in theory p1/p2 order does not matter but in practice p1 == nullid is used as a
134 in theory p1/p2 order does not matter but in practice p1 == nullid is used as a
135 marker that some metadata are present and should be fetched.
135 marker that some metadata are present and should be fetched.
136
136
137 $ hg debugrevlogindex b.txt
137 $ hg debugrevlogindex b.txt
138 rev linkrev nodeid p1 p2
138 rev linkrev nodeid p1 p2
139 0 2 05b806ebe5ea 000000000000 000000000000
139 0 2 05b806ebe5ea 000000000000 000000000000
140 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
140 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
141
141
142 Check commit Graph
142 Check commit Graph
143
143
144 $ hg log -G
144 $ hg log -G
145 @ changeset: 3:a1cc2bdca0aa
145 @ changeset: 3:a1cc2bdca0aa
146 |\ tag: tip
146 |\ tag: tip
147 | | parent: 2:615c6ccefd15
147 | | parent: 2:615c6ccefd15
148 | | parent: 1:373d507f4667
148 | | parent: 1:373d507f4667
149 | | user: test
149 | | user: test
150 | | date: Thu Jan 01 00:00:00 1970 +0000
150 | | date: Thu Jan 01 00:00:00 1970 +0000
151 | | summary: c_merge_c: commit merge
151 | | summary: c_merge_c: commit merge
152 | |
152 | |
153 | o changeset: 2:615c6ccefd15
153 | o changeset: 2:615c6ccefd15
154 | | parent: 0:f5a5a568022f
154 | | parent: 0:f5a5a568022f
155 | | user: test
155 | | user: test
156 | | date: Thu Jan 01 00:00:00 1970 +0000
156 | | date: Thu Jan 01 00:00:00 1970 +0000
157 | | summary: c_rename_c - rename and modify a.txt to b.txt
157 | | summary: c_rename_c - rename and modify a.txt to b.txt
158 | |
158 | |
159 o | changeset: 1:373d507f4667
159 o | changeset: 1:373d507f4667
160 |/ user: test
160 |/ user: test
161 | date: Thu Jan 01 00:00:00 1970 +0000
161 | date: Thu Jan 01 00:00:00 1970 +0000
162 | summary: c_modify_c - modify a.txt
162 | summary: c_modify_c - modify a.txt
163 |
163 |
164 o changeset: 0:f5a5a568022f
164 o changeset: 0:f5a5a568022f
165 user: test
165 user: test
166 date: Thu Jan 01 00:00:00 1970 +0000
166 date: Thu Jan 01 00:00:00 1970 +0000
167 summary: c_base_c - create a.txt
167 summary: c_base_c - create a.txt
168
168
169
169
170 Test the command that fixes the issue
170 Test the command that fixes the issue
171 =====================================
171 =====================================
172
172
173 Restore a broken repository with multiple broken revisions and a filename that
173 Restore a broken repository with multiple broken revisions and a filename that
174 would get encoded to test the `report` options.
174 would get encoded to test the `report` options.
175 It's a tarball because unbundle might magically fix the issue later.
175 It's a tarball because unbundle might magically fix the issue later.
176
176
177 $ cd ..
177 $ cd ..
178 $ mkdir repo-to-fix
178 $ mkdir repo-to-fix
179 $ cd repo-to-fix
179 $ cd repo-to-fix
180 #if windows
180 #if windows
181 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
181 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
182 only since some versions of tar don't have this flag.
182 only since some versions of tar don't have this flag.
183
183
184 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
184 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
185 #else
185 #else
186 $ tar xf $TESTDIR/bundles/issue6528.tar
186 $ tar xf $TESTDIR/bundles/issue6528.tar
187 #endif
187 #endif
188
188
189 Check that the issue is present
189 Check that the issue is present
190 $ hg st
190 $ hg st
191 M D.txt
191 M D.txt
192 M b.txt
192 M b.txt
193 $ hg debugrevlogindex b.txt
193 $ hg debugrevlogindex b.txt
194 rev linkrev nodeid p1 p2
194 rev linkrev nodeid p1 p2
195 0 2 05b806ebe5ea 000000000000 000000000000
195 0 2 05b806ebe5ea 000000000000 000000000000
196 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
196 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
197 2 6 216a5fe8b8ed 000000000000 000000000000
197 2 6 216a5fe8b8ed 000000000000 000000000000
198 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
198 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
199 $ hg debugrevlogindex D.txt
199 $ hg debugrevlogindex D.txt
200 rev linkrev nodeid p1 p2
200 rev linkrev nodeid p1 p2
201 0 6 2a8d3833f2fb 000000000000 000000000000
201 0 6 2a8d3833f2fb 000000000000 000000000000
202 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
202 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
203
203
204 Dry-run the fix
204 Dry-run the fix
205 $ hg debug-repair-issue6528 --dry-run
205 $ hg debug-repair-issue6528 --dry-run
206 found affected revision 1 for filelog 'data/D.txt.i'
206 found affected revision 1 for filelog 'data/D.txt.i'
207 found affected revision 1 for filelog 'data/b.txt.i'
207 found affected revision 1 for filelog 'data/b.txt.i'
208 found affected revision 3 for filelog 'data/b.txt.i'
208 found affected revision 3 for filelog 'data/b.txt.i'
209 $ hg st
209 $ hg st
210 M D.txt
210 M D.txt
211 M b.txt
211 M b.txt
212 $ hg debugrevlogindex b.txt
212 $ hg debugrevlogindex b.txt
213 rev linkrev nodeid p1 p2
213 rev linkrev nodeid p1 p2
214 0 2 05b806ebe5ea 000000000000 000000000000
214 0 2 05b806ebe5ea 000000000000 000000000000
215 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
215 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
216 2 6 216a5fe8b8ed 000000000000 000000000000
216 2 6 216a5fe8b8ed 000000000000 000000000000
217 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
217 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
218 $ hg debugrevlogindex D.txt
218 $ hg debugrevlogindex D.txt
219 rev linkrev nodeid p1 p2
219 rev linkrev nodeid p1 p2
220 0 6 2a8d3833f2fb 000000000000 000000000000
220 0 6 2a8d3833f2fb 000000000000 000000000000
221 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
221 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
222
222
223 Test the --paranoid option
224 $ hg debug-repair-issue6528 --dry-run --paranoid
225 found affected revision 1 for filelog 'data/D.txt.i'
226 found affected revision 1 for filelog 'data/b.txt.i'
227 found affected revision 3 for filelog 'data/b.txt.i'
228 $ hg st
229 M D.txt
230 M b.txt
231 $ hg debugrevlogindex b.txt
232 rev linkrev nodeid p1 p2
233 0 2 05b806ebe5ea 000000000000 000000000000
234 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
235 2 6 216a5fe8b8ed 000000000000 000000000000
236 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
237 $ hg debugrevlogindex D.txt
238 rev linkrev nodeid p1 p2
239 0 6 2a8d3833f2fb 000000000000 000000000000
240 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
241
223 Run the fix
242 Run the fix
224 $ hg debug-repair-issue6528
243 $ hg debug-repair-issue6528
225 found affected revision 1 for filelog 'data/D.txt.i'
244 found affected revision 1 for filelog 'data/D.txt.i'
226 repaired revision 1 of 'filelog data/D.txt.i'
245 repaired revision 1 of 'filelog data/D.txt.i'
227 found affected revision 1 for filelog 'data/b.txt.i'
246 found affected revision 1 for filelog 'data/b.txt.i'
228 found affected revision 3 for filelog 'data/b.txt.i'
247 found affected revision 3 for filelog 'data/b.txt.i'
229 repaired revision 1 of 'filelog data/b.txt.i'
248 repaired revision 1 of 'filelog data/b.txt.i'
230 repaired revision 3 of 'filelog data/b.txt.i'
249 repaired revision 3 of 'filelog data/b.txt.i'
231
250
232 Check that the fix worked and that running it twice does nothing
251 Check that the fix worked and that running it twice does nothing
233 $ hg st
252 $ hg st
234 $ hg debugrevlogindex b.txt
253 $ hg debugrevlogindex b.txt
235 rev linkrev nodeid p1 p2
254 rev linkrev nodeid p1 p2
236 0 2 05b806ebe5ea 000000000000 000000000000
255 0 2 05b806ebe5ea 000000000000 000000000000
237 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
256 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
238 2 6 216a5fe8b8ed 000000000000 000000000000
257 2 6 216a5fe8b8ed 000000000000 000000000000
239 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
258 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
240 $ hg debugrevlogindex D.txt
259 $ hg debugrevlogindex D.txt
241 rev linkrev nodeid p1 p2
260 rev linkrev nodeid p1 p2
242 0 6 2a8d3833f2fb 000000000000 000000000000
261 0 6 2a8d3833f2fb 000000000000 000000000000
243 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
262 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
244 $ hg debug-repair-issue6528
263 $ hg debug-repair-issue6528
245 no affected revisions were found
264 no affected revisions were found
246 $ hg st
265 $ hg st
247 $ hg debugrevlogindex b.txt
266 $ hg debugrevlogindex b.txt
248 rev linkrev nodeid p1 p2
267 rev linkrev nodeid p1 p2
249 0 2 05b806ebe5ea 000000000000 000000000000
268 0 2 05b806ebe5ea 000000000000 000000000000
250 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
269 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
251 2 6 216a5fe8b8ed 000000000000 000000000000
270 2 6 216a5fe8b8ed 000000000000 000000000000
252 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
271 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
253 $ hg debugrevlogindex D.txt
272 $ hg debugrevlogindex D.txt
254 rev linkrev nodeid p1 p2
273 rev linkrev nodeid p1 p2
255 0 6 2a8d3833f2fb 000000000000 000000000000
274 0 6 2a8d3833f2fb 000000000000 000000000000
256 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
275 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
257
276
258 Try the using the report options
277 Try the using the report options
259 --------------------------------
278 --------------------------------
260
279
261 $ cd ..
280 $ cd ..
262 $ mkdir repo-to-fix-report
281 $ mkdir repo-to-fix-report
263 $ cd repo-to-fix
282 $ cd repo-to-fix
264 #if windows
283 #if windows
265 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
284 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
266 only since some versions of tar don't have this flag.
285 only since some versions of tar don't have this flag.
267
286
268 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
287 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
269 #else
288 #else
270 $ tar xf $TESTDIR/bundles/issue6528.tar
289 $ tar xf $TESTDIR/bundles/issue6528.tar
271 #endif
290 #endif
272
291
273 $ hg debug-repair-issue6528 --to-report $TESTTMP/report.txt
292 $ hg debug-repair-issue6528 --to-report $TESTTMP/report.txt
274 found affected revision 1 for filelog 'data/D.txt.i'
293 found affected revision 1 for filelog 'data/D.txt.i'
275 found affected revision 1 for filelog 'data/b.txt.i'
294 found affected revision 1 for filelog 'data/b.txt.i'
276 found affected revision 3 for filelog 'data/b.txt.i'
295 found affected revision 3 for filelog 'data/b.txt.i'
277 $ cat $TESTTMP/report.txt
296 $ cat $TESTTMP/report.txt
278 2a80419dfc31d7dfb308ac40f3f138282de7d73b D.txt
297 2a80419dfc31d7dfb308ac40f3f138282de7d73b D.txt
279 a58b36ad6b6545195952793099613c2116f3563b,ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 b.txt
298 a58b36ad6b6545195952793099613c2116f3563b,ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 b.txt
280
299
281 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt --dry-run
300 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt --dry-run
282 loading report file '$TESTTMP/report.txt'
301 loading report file '$TESTTMP/report.txt'
283 found affected revision 1 for filelog 'D.txt'
302 found affected revision 1 for filelog 'D.txt'
284 found affected revision 1 for filelog 'b.txt'
303 found affected revision 1 for filelog 'b.txt'
285 found affected revision 3 for filelog 'b.txt'
304 found affected revision 3 for filelog 'b.txt'
286 $ hg st
305 $ hg st
287 M D.txt
306 M D.txt
288 M b.txt
307 M b.txt
289 $ hg debugrevlogindex b.txt
308 $ hg debugrevlogindex b.txt
290 rev linkrev nodeid p1 p2
309 rev linkrev nodeid p1 p2
291 0 2 05b806ebe5ea 000000000000 000000000000
310 0 2 05b806ebe5ea 000000000000 000000000000
292 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
311 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
293 2 6 216a5fe8b8ed 000000000000 000000000000
312 2 6 216a5fe8b8ed 000000000000 000000000000
294 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
313 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
295 $ hg debugrevlogindex D.txt
314 $ hg debugrevlogindex D.txt
296 rev linkrev nodeid p1 p2
315 rev linkrev nodeid p1 p2
297 0 6 2a8d3833f2fb 000000000000 000000000000
316 0 6 2a8d3833f2fb 000000000000 000000000000
298 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
317 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
299
318
300 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
319 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
301 loading report file '$TESTTMP/report.txt'
320 loading report file '$TESTTMP/report.txt'
302 found affected revision 1 for filelog 'D.txt'
321 found affected revision 1 for filelog 'D.txt'
303 repaired revision 1 of 'filelog data/D.txt.i'
322 repaired revision 1 of 'filelog data/D.txt.i'
304 found affected revision 1 for filelog 'b.txt'
323 found affected revision 1 for filelog 'b.txt'
305 found affected revision 3 for filelog 'b.txt'
324 found affected revision 3 for filelog 'b.txt'
306 repaired revision 1 of 'filelog data/b.txt.i'
325 repaired revision 1 of 'filelog data/b.txt.i'
307 repaired revision 3 of 'filelog data/b.txt.i'
326 repaired revision 3 of 'filelog data/b.txt.i'
308 $ hg st
327 $ hg st
309 $ hg debugrevlogindex b.txt
328 $ hg debugrevlogindex b.txt
310 rev linkrev nodeid p1 p2
329 rev linkrev nodeid p1 p2
311 0 2 05b806ebe5ea 000000000000 000000000000
330 0 2 05b806ebe5ea 000000000000 000000000000
312 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
331 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
313 2 6 216a5fe8b8ed 000000000000 000000000000
332 2 6 216a5fe8b8ed 000000000000 000000000000
314 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
333 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
315 $ hg debugrevlogindex D.txt
334 $ hg debugrevlogindex D.txt
316 rev linkrev nodeid p1 p2
335 rev linkrev nodeid p1 p2
317 0 6 2a8d3833f2fb 000000000000 000000000000
336 0 6 2a8d3833f2fb 000000000000 000000000000
318 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
337 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
319
338
320 Check that the revision is not "fixed" again
339 Check that the revision is not "fixed" again
321
340
322 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
341 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
323 loading report file '$TESTTMP/report.txt'
342 loading report file '$TESTTMP/report.txt'
324 revision 2a80419dfc31d7dfb308ac40f3f138282de7d73b of file 'D.txt' is not affected
343 revision 2a80419dfc31d7dfb308ac40f3f138282de7d73b of file 'D.txt' is not affected
325 no affected revisions were found for 'D.txt'
344 no affected revisions were found for 'D.txt'
326 revision a58b36ad6b6545195952793099613c2116f3563b of file 'b.txt' is not affected
345 revision a58b36ad6b6545195952793099613c2116f3563b of file 'b.txt' is not affected
327 revision ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 of file 'b.txt' is not affected
346 revision ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 of file 'b.txt' is not affected
328 no affected revisions were found for 'b.txt'
347 no affected revisions were found for 'b.txt'
329 $ hg st
348 $ hg st
330 $ hg debugrevlogindex b.txt
349 $ hg debugrevlogindex b.txt
331 rev linkrev nodeid p1 p2
350 rev linkrev nodeid p1 p2
332 0 2 05b806ebe5ea 000000000000 000000000000
351 0 2 05b806ebe5ea 000000000000 000000000000
333 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
352 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
334 2 6 216a5fe8b8ed 000000000000 000000000000
353 2 6 216a5fe8b8ed 000000000000 000000000000
335 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
354 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
336 $ hg debugrevlogindex D.txt
355 $ hg debugrevlogindex D.txt
337 rev linkrev nodeid p1 p2
356 rev linkrev nodeid p1 p2
338 0 6 2a8d3833f2fb 000000000000 000000000000
357 0 6 2a8d3833f2fb 000000000000 000000000000
339 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
358 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
340
359
341 Try it with a non-inline revlog
360 Try it with a non-inline revlog
342 -------------------------------
361 -------------------------------
343
362
344 $ cd ..
363 $ cd ..
345 $ mkdir $TESTTMP/ext
364 $ mkdir $TESTTMP/ext
346 $ cat << EOF > $TESTTMP/ext/small_inline.py
365 $ cat << EOF > $TESTTMP/ext/small_inline.py
347 > from mercurial import revlog
366 > from mercurial import revlog
348 > revlog._maxinline = 8
367 > revlog._maxinline = 8
349 > EOF
368 > EOF
350
369
351 $ cat << EOF >> $HGRCPATH
370 $ cat << EOF >> $HGRCPATH
352 > [extensions]
371 > [extensions]
353 > small_inline=$TESTTMP/ext/small_inline.py
372 > small_inline=$TESTTMP/ext/small_inline.py
354 > EOF
373 > EOF
355
374
356 $ mkdir repo-to-fix-not-inline
375 $ mkdir repo-to-fix-not-inline
357 $ cd repo-to-fix-not-inline
376 $ cd repo-to-fix-not-inline
358 #if windows
377 #if windows
359 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
378 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
360 only since some versions of tar don't have this flag.
379 only since some versions of tar don't have this flag.
361
380
362 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
381 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
363 #else
382 #else
364 $ tar xf $TESTDIR/bundles/issue6528.tar
383 $ tar xf $TESTDIR/bundles/issue6528.tar
365 #endif
384 #endif
366 $ echo b >> b.txt
385 $ echo b >> b.txt
367 $ hg commit -qm "inline -> separate"
386 $ hg commit -qm "inline -> separate"
368 $ find .hg -name *b.txt.d
387 $ find .hg -name *b.txt.d
369 .hg/store/data/b.txt.d
388 .hg/store/data/b.txt.d
370
389
371 Status is correct, but the problem is still there, in the earlier revision
390 Status is correct, but the problem is still there, in the earlier revision
372 $ hg st
391 $ hg st
373 $ hg up 3
392 $ hg up 3
374 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
393 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
375 $ hg st
394 $ hg st
376 M b.txt
395 M b.txt
377 $ hg debugrevlogindex b.txt
396 $ hg debugrevlogindex b.txt
378 rev linkrev nodeid p1 p2
397 rev linkrev nodeid p1 p2
379 0 2 05b806ebe5ea 000000000000 000000000000
398 0 2 05b806ebe5ea 000000000000 000000000000
380 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
399 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
381 2 6 216a5fe8b8ed 000000000000 000000000000
400 2 6 216a5fe8b8ed 000000000000 000000000000
382 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
401 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
383 4 8 db234885e2fe ea4f2f2463cc 000000000000
402 4 8 db234885e2fe ea4f2f2463cc 000000000000
384 $ hg debugrevlogindex D.txt
403 $ hg debugrevlogindex D.txt
385 rev linkrev nodeid p1 p2
404 rev linkrev nodeid p1 p2
386 0 6 2a8d3833f2fb 000000000000 000000000000
405 0 6 2a8d3833f2fb 000000000000 000000000000
387 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
406 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
388 2 8 65aecc89bb5d 2a80419dfc31 000000000000
407 2 8 65aecc89bb5d 2a80419dfc31 000000000000
389
408
390 Run the fix on the non-inline revlog
409 Run the fix on the non-inline revlog
391 $ hg debug-repair-issue6528
410 $ hg debug-repair-issue6528
392 found affected revision 1 for filelog 'data/D.txt.i'
411 found affected revision 1 for filelog 'data/D.txt.i'
393 repaired revision 1 of 'filelog data/D.txt.i'
412 repaired revision 1 of 'filelog data/D.txt.i'
394 found affected revision 1 for filelog 'data/b.txt.i'
413 found affected revision 1 for filelog 'data/b.txt.i'
395 found affected revision 3 for filelog 'data/b.txt.i'
414 found affected revision 3 for filelog 'data/b.txt.i'
396 repaired revision 1 of 'filelog data/b.txt.i'
415 repaired revision 1 of 'filelog data/b.txt.i'
397 repaired revision 3 of 'filelog data/b.txt.i'
416 repaired revision 3 of 'filelog data/b.txt.i'
398
417
399 Check that it worked
418 Check that it worked
400 $ hg debugrevlogindex b.txt
419 $ hg debugrevlogindex b.txt
401 rev linkrev nodeid p1 p2
420 rev linkrev nodeid p1 p2
402 0 2 05b806ebe5ea 000000000000 000000000000
421 0 2 05b806ebe5ea 000000000000 000000000000
403 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
422 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
404 2 6 216a5fe8b8ed 000000000000 000000000000
423 2 6 216a5fe8b8ed 000000000000 000000000000
405 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
424 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
406 4 8 db234885e2fe ea4f2f2463cc 000000000000
425 4 8 db234885e2fe ea4f2f2463cc 000000000000
407 $ hg debugrevlogindex D.txt
426 $ hg debugrevlogindex D.txt
408 rev linkrev nodeid p1 p2
427 rev linkrev nodeid p1 p2
409 0 6 2a8d3833f2fb 000000000000 000000000000
428 0 6 2a8d3833f2fb 000000000000 000000000000
410 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
429 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
411 2 8 65aecc89bb5d 2a80419dfc31 000000000000
430 2 8 65aecc89bb5d 2a80419dfc31 000000000000
412 $ hg debug-repair-issue6528
431 $ hg debug-repair-issue6528
413 no affected revisions were found
432 no affected revisions were found
414 $ hg st
433 $ hg st
General Comments 0
You need to be logged in to leave comments. Login now