##// END OF EJS Templates
debugrebuildfncache: add a cheaper option to rebuild the fncache...
Valentin Gatien-Baron -
r48674:8e4659b5 stable draft
parent child Browse files
Show More
@@ -1,4932 +1,4944 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import codecs
11 import codecs
12 import collections
12 import collections
13 import contextlib
13 import contextlib
14 import difflib
14 import difflib
15 import errno
15 import errno
16 import glob
16 import glob
17 import operator
17 import operator
18 import os
18 import os
19 import platform
19 import platform
20 import random
20 import random
21 import re
21 import re
22 import socket
22 import socket
23 import ssl
23 import ssl
24 import stat
24 import stat
25 import string
25 import string
26 import subprocess
26 import subprocess
27 import sys
27 import sys
28 import time
28 import time
29
29
30 from .i18n import _
30 from .i18n import _
31 from .node import (
31 from .node import (
32 bin,
32 bin,
33 hex,
33 hex,
34 nullrev,
34 nullrev,
35 short,
35 short,
36 )
36 )
37 from .pycompat import (
37 from .pycompat import (
38 getattr,
38 getattr,
39 open,
39 open,
40 )
40 )
41 from . import (
41 from . import (
42 bundle2,
42 bundle2,
43 bundlerepo,
43 bundlerepo,
44 changegroup,
44 changegroup,
45 cmdutil,
45 cmdutil,
46 color,
46 color,
47 context,
47 context,
48 copies,
48 copies,
49 dagparser,
49 dagparser,
50 encoding,
50 encoding,
51 error,
51 error,
52 exchange,
52 exchange,
53 extensions,
53 extensions,
54 filemerge,
54 filemerge,
55 filesetlang,
55 filesetlang,
56 formatter,
56 formatter,
57 hg,
57 hg,
58 httppeer,
58 httppeer,
59 localrepo,
59 localrepo,
60 lock as lockmod,
60 lock as lockmod,
61 logcmdutil,
61 logcmdutil,
62 mergestate as mergestatemod,
62 mergestate as mergestatemod,
63 metadata,
63 metadata,
64 obsolete,
64 obsolete,
65 obsutil,
65 obsutil,
66 pathutil,
66 pathutil,
67 phases,
67 phases,
68 policy,
68 policy,
69 pvec,
69 pvec,
70 pycompat,
70 pycompat,
71 registrar,
71 registrar,
72 repair,
72 repair,
73 repoview,
73 repoview,
74 requirements,
74 requirements,
75 revlog,
75 revlog,
76 revset,
76 revset,
77 revsetlang,
77 revsetlang,
78 scmutil,
78 scmutil,
79 setdiscovery,
79 setdiscovery,
80 simplemerge,
80 simplemerge,
81 sshpeer,
81 sshpeer,
82 sslutil,
82 sslutil,
83 streamclone,
83 streamclone,
84 strip,
84 strip,
85 tags as tagsmod,
85 tags as tagsmod,
86 templater,
86 templater,
87 treediscovery,
87 treediscovery,
88 upgrade,
88 upgrade,
89 url as urlmod,
89 url as urlmod,
90 util,
90 util,
91 vfs as vfsmod,
91 vfs as vfsmod,
92 wireprotoframing,
92 wireprotoframing,
93 wireprotoserver,
93 wireprotoserver,
94 wireprotov2peer,
94 wireprotov2peer,
95 )
95 )
96 from .interfaces import repository
96 from .interfaces import repository
97 from .utils import (
97 from .utils import (
98 cborutil,
98 cborutil,
99 compression,
99 compression,
100 dateutil,
100 dateutil,
101 procutil,
101 procutil,
102 stringutil,
102 stringutil,
103 urlutil,
103 urlutil,
104 )
104 )
105
105
106 from .revlogutils import (
106 from .revlogutils import (
107 deltas as deltautil,
107 deltas as deltautil,
108 nodemap,
108 nodemap,
109 rewrite,
109 rewrite,
110 sidedata,
110 sidedata,
111 )
111 )
112
112
113 release = lockmod.release
113 release = lockmod.release
114
114
115 table = {}
115 table = {}
116 table.update(strip.command._table)
116 table.update(strip.command._table)
117 command = registrar.command(table)
117 command = registrar.command(table)
118
118
119
119
120 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
120 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
121 def debugancestor(ui, repo, *args):
121 def debugancestor(ui, repo, *args):
122 """find the ancestor revision of two revisions in a given index"""
122 """find the ancestor revision of two revisions in a given index"""
123 if len(args) == 3:
123 if len(args) == 3:
124 index, rev1, rev2 = args
124 index, rev1, rev2 = args
125 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
125 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
126 lookup = r.lookup
126 lookup = r.lookup
127 elif len(args) == 2:
127 elif len(args) == 2:
128 if not repo:
128 if not repo:
129 raise error.Abort(
129 raise error.Abort(
130 _(b'there is no Mercurial repository here (.hg not found)')
130 _(b'there is no Mercurial repository here (.hg not found)')
131 )
131 )
132 rev1, rev2 = args
132 rev1, rev2 = args
133 r = repo.changelog
133 r = repo.changelog
134 lookup = repo.lookup
134 lookup = repo.lookup
135 else:
135 else:
136 raise error.Abort(_(b'either two or three arguments required'))
136 raise error.Abort(_(b'either two or three arguments required'))
137 a = r.ancestor(lookup(rev1), lookup(rev2))
137 a = r.ancestor(lookup(rev1), lookup(rev2))
138 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
139
139
140
140
141 @command(b'debugantivirusrunning', [])
141 @command(b'debugantivirusrunning', [])
142 def debugantivirusrunning(ui, repo):
142 def debugantivirusrunning(ui, repo):
143 """attempt to trigger an antivirus scanner to see if one is active"""
143 """attempt to trigger an antivirus scanner to see if one is active"""
144 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
144 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
145 f.write(
145 f.write(
146 util.b85decode(
146 util.b85decode(
147 # This is a base85-armored version of the EICAR test file. See
147 # This is a base85-armored version of the EICAR test file. See
148 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
148 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
149 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
149 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
150 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
150 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
151 )
151 )
152 )
152 )
153 # Give an AV engine time to scan the file.
153 # Give an AV engine time to scan the file.
154 time.sleep(2)
154 time.sleep(2)
155 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156
156
157
157
158 @command(b'debugapplystreamclonebundle', [], b'FILE')
158 @command(b'debugapplystreamclonebundle', [], b'FILE')
159 def debugapplystreamclonebundle(ui, repo, fname):
159 def debugapplystreamclonebundle(ui, repo, fname):
160 """apply a stream clone bundle file"""
160 """apply a stream clone bundle file"""
161 f = hg.openpath(ui, fname)
161 f = hg.openpath(ui, fname)
162 gen = exchange.readbundle(ui, f, fname)
162 gen = exchange.readbundle(ui, f, fname)
163 gen.apply(repo)
163 gen.apply(repo)
164
164
165
165
166 @command(
166 @command(
167 b'debugbuilddag',
167 b'debugbuilddag',
168 [
168 [
169 (
169 (
170 b'm',
170 b'm',
171 b'mergeable-file',
171 b'mergeable-file',
172 None,
172 None,
173 _(b'add single file mergeable changes'),
173 _(b'add single file mergeable changes'),
174 ),
174 ),
175 (
175 (
176 b'o',
176 b'o',
177 b'overwritten-file',
177 b'overwritten-file',
178 None,
178 None,
179 _(b'add single file all revs overwrite'),
179 _(b'add single file all revs overwrite'),
180 ),
180 ),
181 (b'n', b'new-file', None, _(b'add new file at each rev')),
181 (b'n', b'new-file', None, _(b'add new file at each rev')),
182 ],
182 ],
183 _(b'[OPTION]... [TEXT]'),
183 _(b'[OPTION]... [TEXT]'),
184 )
184 )
185 def debugbuilddag(
185 def debugbuilddag(
186 ui,
186 ui,
187 repo,
187 repo,
188 text=None,
188 text=None,
189 mergeable_file=False,
189 mergeable_file=False,
190 overwritten_file=False,
190 overwritten_file=False,
191 new_file=False,
191 new_file=False,
192 ):
192 ):
193 """builds a repo with a given DAG from scratch in the current empty repo
193 """builds a repo with a given DAG from scratch in the current empty repo
194
194
195 The description of the DAG is read from stdin if not given on the
195 The description of the DAG is read from stdin if not given on the
196 command line.
196 command line.
197
197
198 Elements:
198 Elements:
199
199
200 - "+n" is a linear run of n nodes based on the current default parent
200 - "+n" is a linear run of n nodes based on the current default parent
201 - "." is a single node based on the current default parent
201 - "." is a single node based on the current default parent
202 - "$" resets the default parent to null (implied at the start);
202 - "$" resets the default parent to null (implied at the start);
203 otherwise the default parent is always the last node created
203 otherwise the default parent is always the last node created
204 - "<p" sets the default parent to the backref p
204 - "<p" sets the default parent to the backref p
205 - "*p" is a fork at parent p, which is a backref
205 - "*p" is a fork at parent p, which is a backref
206 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
206 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
207 - "/p2" is a merge of the preceding node and p2
207 - "/p2" is a merge of the preceding node and p2
208 - ":tag" defines a local tag for the preceding node
208 - ":tag" defines a local tag for the preceding node
209 - "@branch" sets the named branch for subsequent nodes
209 - "@branch" sets the named branch for subsequent nodes
210 - "#...\\n" is a comment up to the end of the line
210 - "#...\\n" is a comment up to the end of the line
211
211
212 Whitespace between the above elements is ignored.
212 Whitespace between the above elements is ignored.
213
213
214 A backref is either
214 A backref is either
215
215
216 - a number n, which references the node curr-n, where curr is the current
216 - a number n, which references the node curr-n, where curr is the current
217 node, or
217 node, or
218 - the name of a local tag you placed earlier using ":tag", or
218 - the name of a local tag you placed earlier using ":tag", or
219 - empty to denote the default parent.
219 - empty to denote the default parent.
220
220
221 All string valued-elements are either strictly alphanumeric, or must
221 All string valued-elements are either strictly alphanumeric, or must
222 be enclosed in double quotes ("..."), with "\\" as escape character.
222 be enclosed in double quotes ("..."), with "\\" as escape character.
223 """
223 """
224
224
225 if text is None:
225 if text is None:
226 ui.status(_(b"reading DAG from stdin\n"))
226 ui.status(_(b"reading DAG from stdin\n"))
227 text = ui.fin.read()
227 text = ui.fin.read()
228
228
229 cl = repo.changelog
229 cl = repo.changelog
230 if len(cl) > 0:
230 if len(cl) > 0:
231 raise error.Abort(_(b'repository is not empty'))
231 raise error.Abort(_(b'repository is not empty'))
232
232
233 # determine number of revs in DAG
233 # determine number of revs in DAG
234 total = 0
234 total = 0
235 for type, data in dagparser.parsedag(text):
235 for type, data in dagparser.parsedag(text):
236 if type == b'n':
236 if type == b'n':
237 total += 1
237 total += 1
238
238
239 if mergeable_file:
239 if mergeable_file:
240 linesperrev = 2
240 linesperrev = 2
241 # make a file with k lines per rev
241 # make a file with k lines per rev
242 initialmergedlines = [
242 initialmergedlines = [
243 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
243 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
244 ]
244 ]
245 initialmergedlines.append(b"")
245 initialmergedlines.append(b"")
246
246
247 tags = []
247 tags = []
248 progress = ui.makeprogress(
248 progress = ui.makeprogress(
249 _(b'building'), unit=_(b'revisions'), total=total
249 _(b'building'), unit=_(b'revisions'), total=total
250 )
250 )
251 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
251 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
252 at = -1
252 at = -1
253 atbranch = b'default'
253 atbranch = b'default'
254 nodeids = []
254 nodeids = []
255 id = 0
255 id = 0
256 progress.update(id)
256 progress.update(id)
257 for type, data in dagparser.parsedag(text):
257 for type, data in dagparser.parsedag(text):
258 if type == b'n':
258 if type == b'n':
259 ui.note((b'node %s\n' % pycompat.bytestr(data)))
259 ui.note((b'node %s\n' % pycompat.bytestr(data)))
260 id, ps = data
260 id, ps = data
261
261
262 files = []
262 files = []
263 filecontent = {}
263 filecontent = {}
264
264
265 p2 = None
265 p2 = None
266 if mergeable_file:
266 if mergeable_file:
267 fn = b"mf"
267 fn = b"mf"
268 p1 = repo[ps[0]]
268 p1 = repo[ps[0]]
269 if len(ps) > 1:
269 if len(ps) > 1:
270 p2 = repo[ps[1]]
270 p2 = repo[ps[1]]
271 pa = p1.ancestor(p2)
271 pa = p1.ancestor(p2)
272 base, local, other = [
272 base, local, other = [
273 x[fn].data() for x in (pa, p1, p2)
273 x[fn].data() for x in (pa, p1, p2)
274 ]
274 ]
275 m3 = simplemerge.Merge3Text(base, local, other)
275 m3 = simplemerge.Merge3Text(base, local, other)
276 ml = [l.strip() for l in m3.merge_lines()]
276 ml = [l.strip() for l in m3.merge_lines()]
277 ml.append(b"")
277 ml.append(b"")
278 elif at > 0:
278 elif at > 0:
279 ml = p1[fn].data().split(b"\n")
279 ml = p1[fn].data().split(b"\n")
280 else:
280 else:
281 ml = initialmergedlines
281 ml = initialmergedlines
282 ml[id * linesperrev] += b" r%i" % id
282 ml[id * linesperrev] += b" r%i" % id
283 mergedtext = b"\n".join(ml)
283 mergedtext = b"\n".join(ml)
284 files.append(fn)
284 files.append(fn)
285 filecontent[fn] = mergedtext
285 filecontent[fn] = mergedtext
286
286
287 if overwritten_file:
287 if overwritten_file:
288 fn = b"of"
288 fn = b"of"
289 files.append(fn)
289 files.append(fn)
290 filecontent[fn] = b"r%i\n" % id
290 filecontent[fn] = b"r%i\n" % id
291
291
292 if new_file:
292 if new_file:
293 fn = b"nf%i" % id
293 fn = b"nf%i" % id
294 files.append(fn)
294 files.append(fn)
295 filecontent[fn] = b"r%i\n" % id
295 filecontent[fn] = b"r%i\n" % id
296 if len(ps) > 1:
296 if len(ps) > 1:
297 if not p2:
297 if not p2:
298 p2 = repo[ps[1]]
298 p2 = repo[ps[1]]
299 for fn in p2:
299 for fn in p2:
300 if fn.startswith(b"nf"):
300 if fn.startswith(b"nf"):
301 files.append(fn)
301 files.append(fn)
302 filecontent[fn] = p2[fn].data()
302 filecontent[fn] = p2[fn].data()
303
303
304 def fctxfn(repo, cx, path):
304 def fctxfn(repo, cx, path):
305 if path in filecontent:
305 if path in filecontent:
306 return context.memfilectx(
306 return context.memfilectx(
307 repo, cx, path, filecontent[path]
307 repo, cx, path, filecontent[path]
308 )
308 )
309 return None
309 return None
310
310
311 if len(ps) == 0 or ps[0] < 0:
311 if len(ps) == 0 or ps[0] < 0:
312 pars = [None, None]
312 pars = [None, None]
313 elif len(ps) == 1:
313 elif len(ps) == 1:
314 pars = [nodeids[ps[0]], None]
314 pars = [nodeids[ps[0]], None]
315 else:
315 else:
316 pars = [nodeids[p] for p in ps]
316 pars = [nodeids[p] for p in ps]
317 cx = context.memctx(
317 cx = context.memctx(
318 repo,
318 repo,
319 pars,
319 pars,
320 b"r%i" % id,
320 b"r%i" % id,
321 files,
321 files,
322 fctxfn,
322 fctxfn,
323 date=(id, 0),
323 date=(id, 0),
324 user=b"debugbuilddag",
324 user=b"debugbuilddag",
325 extra={b'branch': atbranch},
325 extra={b'branch': atbranch},
326 )
326 )
327 nodeid = repo.commitctx(cx)
327 nodeid = repo.commitctx(cx)
328 nodeids.append(nodeid)
328 nodeids.append(nodeid)
329 at = id
329 at = id
330 elif type == b'l':
330 elif type == b'l':
331 id, name = data
331 id, name = data
332 ui.note((b'tag %s\n' % name))
332 ui.note((b'tag %s\n' % name))
333 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
333 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
334 elif type == b'a':
334 elif type == b'a':
335 ui.note((b'branch %s\n' % data))
335 ui.note((b'branch %s\n' % data))
336 atbranch = data
336 atbranch = data
337 progress.update(id)
337 progress.update(id)
338
338
339 if tags:
339 if tags:
340 repo.vfs.write(b"localtags", b"".join(tags))
340 repo.vfs.write(b"localtags", b"".join(tags))
341
341
342
342
343 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
343 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
344 indent_string = b' ' * indent
344 indent_string = b' ' * indent
345 if all:
345 if all:
346 ui.writenoi18n(
346 ui.writenoi18n(
347 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
347 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
348 % indent_string
348 % indent_string
349 )
349 )
350
350
351 def showchunks(named):
351 def showchunks(named):
352 ui.write(b"\n%s%s\n" % (indent_string, named))
352 ui.write(b"\n%s%s\n" % (indent_string, named))
353 for deltadata in gen.deltaiter():
353 for deltadata in gen.deltaiter():
354 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
354 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
355 ui.write(
355 ui.write(
356 b"%s%s %s %s %s %s %d\n"
356 b"%s%s %s %s %s %s %d\n"
357 % (
357 % (
358 indent_string,
358 indent_string,
359 hex(node),
359 hex(node),
360 hex(p1),
360 hex(p1),
361 hex(p2),
361 hex(p2),
362 hex(cs),
362 hex(cs),
363 hex(deltabase),
363 hex(deltabase),
364 len(delta),
364 len(delta),
365 )
365 )
366 )
366 )
367
367
368 gen.changelogheader()
368 gen.changelogheader()
369 showchunks(b"changelog")
369 showchunks(b"changelog")
370 gen.manifestheader()
370 gen.manifestheader()
371 showchunks(b"manifest")
371 showchunks(b"manifest")
372 for chunkdata in iter(gen.filelogheader, {}):
372 for chunkdata in iter(gen.filelogheader, {}):
373 fname = chunkdata[b'filename']
373 fname = chunkdata[b'filename']
374 showchunks(fname)
374 showchunks(fname)
375 else:
375 else:
376 if isinstance(gen, bundle2.unbundle20):
376 if isinstance(gen, bundle2.unbundle20):
377 raise error.Abort(_(b'use debugbundle2 for this file'))
377 raise error.Abort(_(b'use debugbundle2 for this file'))
378 gen.changelogheader()
378 gen.changelogheader()
379 for deltadata in gen.deltaiter():
379 for deltadata in gen.deltaiter():
380 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
380 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
381 ui.write(b"%s%s\n" % (indent_string, hex(node)))
381 ui.write(b"%s%s\n" % (indent_string, hex(node)))
382
382
383
383
384 def _debugobsmarkers(ui, part, indent=0, **opts):
384 def _debugobsmarkers(ui, part, indent=0, **opts):
385 """display version and markers contained in 'data'"""
385 """display version and markers contained in 'data'"""
386 opts = pycompat.byteskwargs(opts)
386 opts = pycompat.byteskwargs(opts)
387 data = part.read()
387 data = part.read()
388 indent_string = b' ' * indent
388 indent_string = b' ' * indent
389 try:
389 try:
390 version, markers = obsolete._readmarkers(data)
390 version, markers = obsolete._readmarkers(data)
391 except error.UnknownVersion as exc:
391 except error.UnknownVersion as exc:
392 msg = b"%sunsupported version: %s (%d bytes)\n"
392 msg = b"%sunsupported version: %s (%d bytes)\n"
393 msg %= indent_string, exc.version, len(data)
393 msg %= indent_string, exc.version, len(data)
394 ui.write(msg)
394 ui.write(msg)
395 else:
395 else:
396 msg = b"%sversion: %d (%d bytes)\n"
396 msg = b"%sversion: %d (%d bytes)\n"
397 msg %= indent_string, version, len(data)
397 msg %= indent_string, version, len(data)
398 ui.write(msg)
398 ui.write(msg)
399 fm = ui.formatter(b'debugobsolete', opts)
399 fm = ui.formatter(b'debugobsolete', opts)
400 for rawmarker in sorted(markers):
400 for rawmarker in sorted(markers):
401 m = obsutil.marker(None, rawmarker)
401 m = obsutil.marker(None, rawmarker)
402 fm.startitem()
402 fm.startitem()
403 fm.plain(indent_string)
403 fm.plain(indent_string)
404 cmdutil.showmarker(fm, m)
404 cmdutil.showmarker(fm, m)
405 fm.end()
405 fm.end()
406
406
407
407
408 def _debugphaseheads(ui, data, indent=0):
408 def _debugphaseheads(ui, data, indent=0):
409 """display version and markers contained in 'data'"""
409 """display version and markers contained in 'data'"""
410 indent_string = b' ' * indent
410 indent_string = b' ' * indent
411 headsbyphase = phases.binarydecode(data)
411 headsbyphase = phases.binarydecode(data)
412 for phase in phases.allphases:
412 for phase in phases.allphases:
413 for head in headsbyphase[phase]:
413 for head in headsbyphase[phase]:
414 ui.write(indent_string)
414 ui.write(indent_string)
415 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
415 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
416
416
417
417
418 def _quasirepr(thing):
418 def _quasirepr(thing):
419 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
419 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
420 return b'{%s}' % (
420 return b'{%s}' % (
421 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
421 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
422 )
422 )
423 return pycompat.bytestr(repr(thing))
423 return pycompat.bytestr(repr(thing))
424
424
425
425
426 def _debugbundle2(ui, gen, all=None, **opts):
426 def _debugbundle2(ui, gen, all=None, **opts):
427 """lists the contents of a bundle2"""
427 """lists the contents of a bundle2"""
428 if not isinstance(gen, bundle2.unbundle20):
428 if not isinstance(gen, bundle2.unbundle20):
429 raise error.Abort(_(b'not a bundle2 file'))
429 raise error.Abort(_(b'not a bundle2 file'))
430 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
430 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
431 parttypes = opts.get('part_type', [])
431 parttypes = opts.get('part_type', [])
432 for part in gen.iterparts():
432 for part in gen.iterparts():
433 if parttypes and part.type not in parttypes:
433 if parttypes and part.type not in parttypes:
434 continue
434 continue
435 msg = b'%s -- %s (mandatory: %r)\n'
435 msg = b'%s -- %s (mandatory: %r)\n'
436 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
436 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
437 if part.type == b'changegroup':
437 if part.type == b'changegroup':
438 version = part.params.get(b'version', b'01')
438 version = part.params.get(b'version', b'01')
439 cg = changegroup.getunbundler(version, part, b'UN')
439 cg = changegroup.getunbundler(version, part, b'UN')
440 if not ui.quiet:
440 if not ui.quiet:
441 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
441 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
442 if part.type == b'obsmarkers':
442 if part.type == b'obsmarkers':
443 if not ui.quiet:
443 if not ui.quiet:
444 _debugobsmarkers(ui, part, indent=4, **opts)
444 _debugobsmarkers(ui, part, indent=4, **opts)
445 if part.type == b'phase-heads':
445 if part.type == b'phase-heads':
446 if not ui.quiet:
446 if not ui.quiet:
447 _debugphaseheads(ui, part, indent=4)
447 _debugphaseheads(ui, part, indent=4)
448
448
449
449
450 @command(
450 @command(
451 b'debugbundle',
451 b'debugbundle',
452 [
452 [
453 (b'a', b'all', None, _(b'show all details')),
453 (b'a', b'all', None, _(b'show all details')),
454 (b'', b'part-type', [], _(b'show only the named part type')),
454 (b'', b'part-type', [], _(b'show only the named part type')),
455 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
455 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
456 ],
456 ],
457 _(b'FILE'),
457 _(b'FILE'),
458 norepo=True,
458 norepo=True,
459 )
459 )
460 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
460 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
461 """lists the contents of a bundle"""
461 """lists the contents of a bundle"""
462 with hg.openpath(ui, bundlepath) as f:
462 with hg.openpath(ui, bundlepath) as f:
463 if spec:
463 if spec:
464 spec = exchange.getbundlespec(ui, f)
464 spec = exchange.getbundlespec(ui, f)
465 ui.write(b'%s\n' % spec)
465 ui.write(b'%s\n' % spec)
466 return
466 return
467
467
468 gen = exchange.readbundle(ui, f, bundlepath)
468 gen = exchange.readbundle(ui, f, bundlepath)
469 if isinstance(gen, bundle2.unbundle20):
469 if isinstance(gen, bundle2.unbundle20):
470 return _debugbundle2(ui, gen, all=all, **opts)
470 return _debugbundle2(ui, gen, all=all, **opts)
471 _debugchangegroup(ui, gen, all=all, **opts)
471 _debugchangegroup(ui, gen, all=all, **opts)
472
472
473
473
474 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
474 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
475 def debugcapabilities(ui, path, **opts):
475 def debugcapabilities(ui, path, **opts):
476 """lists the capabilities of a remote peer"""
476 """lists the capabilities of a remote peer"""
477 opts = pycompat.byteskwargs(opts)
477 opts = pycompat.byteskwargs(opts)
478 peer = hg.peer(ui, opts, path)
478 peer = hg.peer(ui, opts, path)
479 try:
479 try:
480 caps = peer.capabilities()
480 caps = peer.capabilities()
481 ui.writenoi18n(b'Main capabilities:\n')
481 ui.writenoi18n(b'Main capabilities:\n')
482 for c in sorted(caps):
482 for c in sorted(caps):
483 ui.write(b' %s\n' % c)
483 ui.write(b' %s\n' % c)
484 b2caps = bundle2.bundle2caps(peer)
484 b2caps = bundle2.bundle2caps(peer)
485 if b2caps:
485 if b2caps:
486 ui.writenoi18n(b'Bundle2 capabilities:\n')
486 ui.writenoi18n(b'Bundle2 capabilities:\n')
487 for key, values in sorted(pycompat.iteritems(b2caps)):
487 for key, values in sorted(pycompat.iteritems(b2caps)):
488 ui.write(b' %s\n' % key)
488 ui.write(b' %s\n' % key)
489 for v in values:
489 for v in values:
490 ui.write(b' %s\n' % v)
490 ui.write(b' %s\n' % v)
491 finally:
491 finally:
492 peer.close()
492 peer.close()
493
493
494
494
495 @command(
495 @command(
496 b'debugchangedfiles',
496 b'debugchangedfiles',
497 [
497 [
498 (
498 (
499 b'',
499 b'',
500 b'compute',
500 b'compute',
501 False,
501 False,
502 b"compute information instead of reading it from storage",
502 b"compute information instead of reading it from storage",
503 ),
503 ),
504 ],
504 ],
505 b'REV',
505 b'REV',
506 )
506 )
507 def debugchangedfiles(ui, repo, rev, **opts):
507 def debugchangedfiles(ui, repo, rev, **opts):
508 """list the stored files changes for a revision"""
508 """list the stored files changes for a revision"""
509 ctx = scmutil.revsingle(repo, rev, None)
509 ctx = scmutil.revsingle(repo, rev, None)
510 files = None
510 files = None
511
511
512 if opts['compute']:
512 if opts['compute']:
513 files = metadata.compute_all_files_changes(ctx)
513 files = metadata.compute_all_files_changes(ctx)
514 else:
514 else:
515 sd = repo.changelog.sidedata(ctx.rev())
515 sd = repo.changelog.sidedata(ctx.rev())
516 files_block = sd.get(sidedata.SD_FILES)
516 files_block = sd.get(sidedata.SD_FILES)
517 if files_block is not None:
517 if files_block is not None:
518 files = metadata.decode_files_sidedata(sd)
518 files = metadata.decode_files_sidedata(sd)
519 if files is not None:
519 if files is not None:
520 for f in sorted(files.touched):
520 for f in sorted(files.touched):
521 if f in files.added:
521 if f in files.added:
522 action = b"added"
522 action = b"added"
523 elif f in files.removed:
523 elif f in files.removed:
524 action = b"removed"
524 action = b"removed"
525 elif f in files.merged:
525 elif f in files.merged:
526 action = b"merged"
526 action = b"merged"
527 elif f in files.salvaged:
527 elif f in files.salvaged:
528 action = b"salvaged"
528 action = b"salvaged"
529 else:
529 else:
530 action = b"touched"
530 action = b"touched"
531
531
532 copy_parent = b""
532 copy_parent = b""
533 copy_source = b""
533 copy_source = b""
534 if f in files.copied_from_p1:
534 if f in files.copied_from_p1:
535 copy_parent = b"p1"
535 copy_parent = b"p1"
536 copy_source = files.copied_from_p1[f]
536 copy_source = files.copied_from_p1[f]
537 elif f in files.copied_from_p2:
537 elif f in files.copied_from_p2:
538 copy_parent = b"p2"
538 copy_parent = b"p2"
539 copy_source = files.copied_from_p2[f]
539 copy_source = files.copied_from_p2[f]
540
540
541 data = (action, copy_parent, f, copy_source)
541 data = (action, copy_parent, f, copy_source)
542 template = b"%-8s %2s: %s, %s;\n"
542 template = b"%-8s %2s: %s, %s;\n"
543 ui.write(template % data)
543 ui.write(template % data)
544
544
545
545
546 @command(b'debugcheckstate', [], b'')
546 @command(b'debugcheckstate', [], b'')
547 def debugcheckstate(ui, repo):
547 def debugcheckstate(ui, repo):
548 """validate the correctness of the current dirstate"""
548 """validate the correctness of the current dirstate"""
549 parent1, parent2 = repo.dirstate.parents()
549 parent1, parent2 = repo.dirstate.parents()
550 m1 = repo[parent1].manifest()
550 m1 = repo[parent1].manifest()
551 m2 = repo[parent2].manifest()
551 m2 = repo[parent2].manifest()
552 errors = 0
552 errors = 0
553 for f in repo.dirstate:
553 for f in repo.dirstate:
554 state = repo.dirstate[f]
554 state = repo.dirstate[f]
555 if state in b"nr" and f not in m1:
555 if state in b"nr" and f not in m1:
556 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
556 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
557 errors += 1
557 errors += 1
558 if state in b"a" and f in m1:
558 if state in b"a" and f in m1:
559 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
559 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
560 errors += 1
560 errors += 1
561 if state in b"m" and f not in m1 and f not in m2:
561 if state in b"m" and f not in m1 and f not in m2:
562 ui.warn(
562 ui.warn(
563 _(b"%s in state %s, but not in either manifest\n") % (f, state)
563 _(b"%s in state %s, but not in either manifest\n") % (f, state)
564 )
564 )
565 errors += 1
565 errors += 1
566 for f in m1:
566 for f in m1:
567 state = repo.dirstate[f]
567 state = repo.dirstate[f]
568 if state not in b"nrm":
568 if state not in b"nrm":
569 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
569 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
570 errors += 1
570 errors += 1
571 if errors:
571 if errors:
572 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
572 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
573 raise error.Abort(errstr)
573 raise error.Abort(errstr)
574
574
575
575
576 @command(
576 @command(
577 b'debugcolor',
577 b'debugcolor',
578 [(b'', b'style', None, _(b'show all configured styles'))],
578 [(b'', b'style', None, _(b'show all configured styles'))],
579 b'hg debugcolor',
579 b'hg debugcolor',
580 )
580 )
581 def debugcolor(ui, repo, **opts):
581 def debugcolor(ui, repo, **opts):
582 """show available color, effects or style"""
582 """show available color, effects or style"""
583 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
583 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
584 if opts.get('style'):
584 if opts.get('style'):
585 return _debugdisplaystyle(ui)
585 return _debugdisplaystyle(ui)
586 else:
586 else:
587 return _debugdisplaycolor(ui)
587 return _debugdisplaycolor(ui)
588
588
589
589
def _debugdisplaycolor(ui):
    """List every color/effect name usable with the current color mode."""
    # Work on a copy so the caller's ui keeps its configured styles.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode allows user-defined color/effect codes in [color].
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.') :]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
606
606
607
607
def _debugdisplaystyle(ui):
    """List configured style labels and the effects each one maps to."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the widest one so the effect columns line up.
    colwidth = max(len(name) for name in ui._styles)
    for label in sorted(ui._styles):
        effects = ui._styles[label]
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, colwidth - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
621
621
622
622
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so phase boundaries are
        # not honored; warn rather than abort (for now).
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
644
644
645
645
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog file: read it directly from the current directory,
        # bypassing path auditing.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Emit ('n', (rev, parents)) for every revision; revisions the
            # user listed additionally get an ('l', (rev, "rN")) label event.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to the list of its tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Track the current branch so an ('a', branchname) annotation is
            # emitted only when the branch actually changes.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # dagparser turns the event stream into the concise dag text notation.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
715
715
716
716
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the sole positional
    # argument is the revision; otherwise both FILE and REV are required.
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
732
732
733
733
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # Parse to the internal (unixtime, tzoffset) pair, optionally allowing
    # the extended set of date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Also report whether the parsed date falls inside RANGE.
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
752
752
753
753
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    # Sparse read may be absent on some storage implementations.
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how this revision's delta was stored (against which base)
        # and accumulate the compressed size of its whole delta chain.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revisions.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without general delta, a revision is either a full snapshot
            # (its own base) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-text column header (templated output uses the keywords instead).
    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in order of first appearance of their base.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Bytes spanned on disk from the chain base to the end of this rev.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much data
            # would actually be fetched from disk, and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
934
934
935
935
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but, when given, overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (
            x[1].v1_mtime(),
            x[0],
        )  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    if opts['all']:
        # Include dirstate-v2 tree nodes that have no v1 equivalent.
        entries = list(repo.dirstate._map.debug_iter())
    else:
        entries = list(pycompat.iteritems(repo.dirstate))
    entries.sort(key=keyfunc)
    for file_, ent in entries:
        if ent.v1_mtime() == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
            )
            timestr = encoding.strtolocal(timestr)
        # NOTE(review): this reads ent.mode while every other access in this
        # loop goes through the v1_* accessors (v1_mode() just below) —
        # confirm the two report the same mode bits for all entry states.
        if ent.mode & 0o20000:
            # S_IFLNK bit set: entry records a symlink.
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n"
            % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
        )
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
996
996
997
997
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Fix: the docstring previously said "nothing for dirstate-v2", which
    # contradicted the code below — only the v2 docket stores the
    # ignore-pattern hash, so dirstate-v1 produces no output at all.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored at the tail of the docket's tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1012
1012
1013
1013
1014 @command(
1014 @command(
1015 b'debugdiscovery',
1015 b'debugdiscovery',
1016 [
1016 [
1017 (b'', b'old', None, _(b'use old-style discovery')),
1017 (b'', b'old', None, _(b'use old-style discovery')),
1018 (
1018 (
1019 b'',
1019 b'',
1020 b'nonheads',
1020 b'nonheads',
1021 None,
1021 None,
1022 _(b'use old-style discovery with non-heads included'),
1022 _(b'use old-style discovery with non-heads included'),
1023 ),
1023 ),
1024 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1024 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1025 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1025 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1026 (
1026 (
1027 b'',
1027 b'',
1028 b'local-as-revs',
1028 b'local-as-revs',
1029 b"",
1029 b"",
1030 b'treat local has having these revisions only',
1030 b'treat local has having these revisions only',
1031 ),
1031 ),
1032 (
1032 (
1033 b'',
1033 b'',
1034 b'remote-as-revs',
1034 b'remote-as-revs',
1035 b"",
1035 b"",
1036 b'use local as remote, with only these these revisions',
1036 b'use local as remote, with only these these revisions',
1037 ),
1037 ),
1038 ]
1038 ]
1039 + cmdutil.remoteopts
1039 + cmdutil.remoteopts
1040 + cmdutil.formatteropts,
1040 + cmdutil.formatteropts,
1041 _(b'[--rev REV] [OTHER]'),
1041 _(b'[--rev REV] [OTHER]'),
1042 )
1042 )
1043 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1043 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1044 """runs the changeset discovery protocol in isolation
1044 """runs the changeset discovery protocol in isolation
1045
1045
1046 The local peer can be "replaced" by a subset of the local repository by
1046 The local peer can be "replaced" by a subset of the local repository by
1047 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1047 using the `--local-as-revs` flag. Int he same way, usual `remote` peer can
1048 be "replaced" by a subset of the local repository using the
1048 be "replaced" by a subset of the local repository using the
1049 `--local-as-revs` flag. This is useful to efficiently debug pathological
1049 `--local-as-revs` flag. This is useful to efficiently debug pathological
1050 discovery situation.
1050 discovery situation.
1051
1051
1052 The following developer oriented config are relevant for people playing with this command:
1052 The following developer oriented config are relevant for people playing with this command:
1053
1053
1054 * devel.discovery.exchange-heads=True
1054 * devel.discovery.exchange-heads=True
1055
1055
1056 If False, the discovery will not start with
1056 If False, the discovery will not start with
1057 remote head fetching and local head querying.
1057 remote head fetching and local head querying.
1058
1058
1059 * devel.discovery.grow-sample=True
1059 * devel.discovery.grow-sample=True
1060
1060
1061 If False, the sample size used in set discovery will not be increased
1061 If False, the sample size used in set discovery will not be increased
1062 through the process
1062 through the process
1063
1063
1064 * devel.discovery.grow-sample.dynamic=True
1064 * devel.discovery.grow-sample.dynamic=True
1065
1065
1066 When discovery.grow-sample.dynamic is True, the default, the sample size is
1066 When discovery.grow-sample.dynamic is True, the default, the sample size is
1067 adapted to the shape of the undecided set (it is set to the max of:
1067 adapted to the shape of the undecided set (it is set to the max of:
1068 <target-size>, len(roots(undecided)), len(heads(undecided)
1068 <target-size>, len(roots(undecided)), len(heads(undecided)
1069
1069
1070 * devel.discovery.grow-sample.rate=1.05
1070 * devel.discovery.grow-sample.rate=1.05
1071
1071
1072 the rate at which the sample grow
1072 the rate at which the sample grow
1073
1073
1074 * devel.discovery.randomize=True
1074 * devel.discovery.randomize=True
1075
1075
1076 If andom sampling during discovery are deterministic. It is meant for
1076 If andom sampling during discovery are deterministic. It is meant for
1077 integration tests.
1077 integration tests.
1078
1078
1079 * devel.discovery.sample-size=200
1079 * devel.discovery.sample-size=200
1080
1080
1081 Control the initial size of the discovery sample
1081 Control the initial size of the discovery sample
1082
1082
1083 * devel.discovery.sample-size.initial=100
1083 * devel.discovery.sample-size.initial=100
1084
1084
1085 Control the initial size of the discovery for initial change
1085 Control the initial size of the discovery for initial change
1086 """
1086 """
1087 opts = pycompat.byteskwargs(opts)
1087 opts = pycompat.byteskwargs(opts)
1088 unfi = repo.unfiltered()
1088 unfi = repo.unfiltered()
1089
1089
1090 # setup potential extra filtering
1090 # setup potential extra filtering
1091 local_revs = opts[b"local_as_revs"]
1091 local_revs = opts[b"local_as_revs"]
1092 remote_revs = opts[b"remote_as_revs"]
1092 remote_revs = opts[b"remote_as_revs"]
1093
1093
1094 # make sure tests are repeatable
1094 # make sure tests are repeatable
1095 random.seed(int(opts[b'seed']))
1095 random.seed(int(opts[b'seed']))
1096
1096
1097 if not remote_revs:
1097 if not remote_revs:
1098
1098
1099 remoteurl, branches = urlutil.get_unique_pull_path(
1099 remoteurl, branches = urlutil.get_unique_pull_path(
1100 b'debugdiscovery', repo, ui, remoteurl
1100 b'debugdiscovery', repo, ui, remoteurl
1101 )
1101 )
1102 remote = hg.peer(repo, opts, remoteurl)
1102 remote = hg.peer(repo, opts, remoteurl)
1103 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1103 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1104 else:
1104 else:
1105 branches = (None, [])
1105 branches = (None, [])
1106 remote_filtered_revs = scmutil.revrange(
1106 remote_filtered_revs = scmutil.revrange(
1107 unfi, [b"not (::(%s))" % remote_revs]
1107 unfi, [b"not (::(%s))" % remote_revs]
1108 )
1108 )
1109 remote_filtered_revs = frozenset(remote_filtered_revs)
1109 remote_filtered_revs = frozenset(remote_filtered_revs)
1110
1110
1111 def remote_func(x):
1111 def remote_func(x):
1112 return remote_filtered_revs
1112 return remote_filtered_revs
1113
1113
1114 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1114 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1115
1115
1116 remote = repo.peer()
1116 remote = repo.peer()
1117 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1117 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1118
1118
1119 if local_revs:
1119 if local_revs:
1120 local_filtered_revs = scmutil.revrange(
1120 local_filtered_revs = scmutil.revrange(
1121 unfi, [b"not (::(%s))" % local_revs]
1121 unfi, [b"not (::(%s))" % local_revs]
1122 )
1122 )
1123 local_filtered_revs = frozenset(local_filtered_revs)
1123 local_filtered_revs = frozenset(local_filtered_revs)
1124
1124
1125 def local_func(x):
1125 def local_func(x):
1126 return local_filtered_revs
1126 return local_filtered_revs
1127
1127
1128 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1128 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1129 repo = repo.filtered(b'debug-discovery-local-filter')
1129 repo = repo.filtered(b'debug-discovery-local-filter')
1130
1130
1131 data = {}
1131 data = {}
1132 if opts.get(b'old'):
1132 if opts.get(b'old'):
1133
1133
1134 def doit(pushedrevs, remoteheads, remote=remote):
1134 def doit(pushedrevs, remoteheads, remote=remote):
1135 if not util.safehasattr(remote, b'branches'):
1135 if not util.safehasattr(remote, b'branches'):
1136 # enable in-client legacy support
1136 # enable in-client legacy support
1137 remote = localrepo.locallegacypeer(remote.local())
1137 remote = localrepo.locallegacypeer(remote.local())
1138 common, _in, hds = treediscovery.findcommonincoming(
1138 common, _in, hds = treediscovery.findcommonincoming(
1139 repo, remote, force=True, audit=data
1139 repo, remote, force=True, audit=data
1140 )
1140 )
1141 common = set(common)
1141 common = set(common)
1142 if not opts.get(b'nonheads'):
1142 if not opts.get(b'nonheads'):
1143 ui.writenoi18n(
1143 ui.writenoi18n(
1144 b"unpruned common: %s\n"
1144 b"unpruned common: %s\n"
1145 % b" ".join(sorted(short(n) for n in common))
1145 % b" ".join(sorted(short(n) for n in common))
1146 )
1146 )
1147
1147
1148 clnode = repo.changelog.node
1148 clnode = repo.changelog.node
1149 common = repo.revs(b'heads(::%ln)', common)
1149 common = repo.revs(b'heads(::%ln)', common)
1150 common = {clnode(r) for r in common}
1150 common = {clnode(r) for r in common}
1151 return common, hds
1151 return common, hds
1152
1152
1153 else:
1153 else:
1154
1154
1155 def doit(pushedrevs, remoteheads, remote=remote):
1155 def doit(pushedrevs, remoteheads, remote=remote):
1156 nodes = None
1156 nodes = None
1157 if pushedrevs:
1157 if pushedrevs:
1158 revs = scmutil.revrange(repo, pushedrevs)
1158 revs = scmutil.revrange(repo, pushedrevs)
1159 nodes = [repo[r].node() for r in revs]
1159 nodes = [repo[r].node() for r in revs]
1160 common, any, hds = setdiscovery.findcommonheads(
1160 common, any, hds = setdiscovery.findcommonheads(
1161 ui, repo, remote, ancestorsof=nodes, audit=data
1161 ui, repo, remote, ancestorsof=nodes, audit=data
1162 )
1162 )
1163 return common, hds
1163 return common, hds
1164
1164
1165 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1165 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1166 localrevs = opts[b'rev']
1166 localrevs = opts[b'rev']
1167
1167
1168 fm = ui.formatter(b'debugdiscovery', opts)
1168 fm = ui.formatter(b'debugdiscovery', opts)
1169 if fm.strict_format:
1169 if fm.strict_format:
1170
1170
1171 @contextlib.contextmanager
1171 @contextlib.contextmanager
1172 def may_capture_output():
1172 def may_capture_output():
1173 ui.pushbuffer()
1173 ui.pushbuffer()
1174 yield
1174 yield
1175 data[b'output'] = ui.popbuffer()
1175 data[b'output'] = ui.popbuffer()
1176
1176
1177 else:
1177 else:
1178 may_capture_output = util.nullcontextmanager
1178 may_capture_output = util.nullcontextmanager
1179 with may_capture_output():
1179 with may_capture_output():
1180 with util.timedcm('debug-discovery') as t:
1180 with util.timedcm('debug-discovery') as t:
1181 common, hds = doit(localrevs, remoterevs)
1181 common, hds = doit(localrevs, remoterevs)
1182
1182
1183 # compute all statistics
1183 # compute all statistics
1184 heads_common = set(common)
1184 heads_common = set(common)
1185 heads_remote = set(hds)
1185 heads_remote = set(hds)
1186 heads_local = set(repo.heads())
1186 heads_local = set(repo.heads())
1187 # note: they cannot be a local or remote head that is in common and not
1187 # note: they cannot be a local or remote head that is in common and not
1188 # itself a head of common.
1188 # itself a head of common.
1189 heads_common_local = heads_common & heads_local
1189 heads_common_local = heads_common & heads_local
1190 heads_common_remote = heads_common & heads_remote
1190 heads_common_remote = heads_common & heads_remote
1191 heads_common_both = heads_common & heads_remote & heads_local
1191 heads_common_both = heads_common & heads_remote & heads_local
1192
1192
1193 all = repo.revs(b'all()')
1193 all = repo.revs(b'all()')
1194 common = repo.revs(b'::%ln', common)
1194 common = repo.revs(b'::%ln', common)
1195 roots_common = repo.revs(b'roots(::%ld)', common)
1195 roots_common = repo.revs(b'roots(::%ld)', common)
1196 missing = repo.revs(b'not ::%ld', common)
1196 missing = repo.revs(b'not ::%ld', common)
1197 heads_missing = repo.revs(b'heads(%ld)', missing)
1197 heads_missing = repo.revs(b'heads(%ld)', missing)
1198 roots_missing = repo.revs(b'roots(%ld)', missing)
1198 roots_missing = repo.revs(b'roots(%ld)', missing)
1199 assert len(common) + len(missing) == len(all)
1199 assert len(common) + len(missing) == len(all)
1200
1200
1201 initial_undecided = repo.revs(
1201 initial_undecided = repo.revs(
1202 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1202 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1203 )
1203 )
1204 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1204 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1205 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1205 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1206 common_initial_undecided = initial_undecided & common
1206 common_initial_undecided = initial_undecided & common
1207 missing_initial_undecided = initial_undecided & missing
1207 missing_initial_undecided = initial_undecided & missing
1208
1208
1209 data[b'elapsed'] = t.elapsed
1209 data[b'elapsed'] = t.elapsed
1210 data[b'nb-common-heads'] = len(heads_common)
1210 data[b'nb-common-heads'] = len(heads_common)
1211 data[b'nb-common-heads-local'] = len(heads_common_local)
1211 data[b'nb-common-heads-local'] = len(heads_common_local)
1212 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1212 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1213 data[b'nb-common-heads-both'] = len(heads_common_both)
1213 data[b'nb-common-heads-both'] = len(heads_common_both)
1214 data[b'nb-common-roots'] = len(roots_common)
1214 data[b'nb-common-roots'] = len(roots_common)
1215 data[b'nb-head-local'] = len(heads_local)
1215 data[b'nb-head-local'] = len(heads_local)
1216 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1216 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1217 data[b'nb-head-remote'] = len(heads_remote)
1217 data[b'nb-head-remote'] = len(heads_remote)
1218 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1218 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1219 heads_common_remote
1219 heads_common_remote
1220 )
1220 )
1221 data[b'nb-revs'] = len(all)
1221 data[b'nb-revs'] = len(all)
1222 data[b'nb-revs-common'] = len(common)
1222 data[b'nb-revs-common'] = len(common)
1223 data[b'nb-revs-missing'] = len(missing)
1223 data[b'nb-revs-missing'] = len(missing)
1224 data[b'nb-missing-heads'] = len(heads_missing)
1224 data[b'nb-missing-heads'] = len(heads_missing)
1225 data[b'nb-missing-roots'] = len(roots_missing)
1225 data[b'nb-missing-roots'] = len(roots_missing)
1226 data[b'nb-ini_und'] = len(initial_undecided)
1226 data[b'nb-ini_und'] = len(initial_undecided)
1227 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1227 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1228 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1228 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1229 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1229 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1230 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1230 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1231
1231
1232 fm.startitem()
1232 fm.startitem()
1233 fm.data(**pycompat.strkwargs(data))
1233 fm.data(**pycompat.strkwargs(data))
1234 # display discovery summary
1234 # display discovery summary
1235 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1235 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1236 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1236 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1237 fm.plain(b"heads summary:\n")
1237 fm.plain(b"heads summary:\n")
1238 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1238 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1239 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1239 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1240 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1240 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1241 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1241 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1242 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1242 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1243 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1243 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1244 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1244 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1245 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1245 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1246 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1246 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1247 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1247 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1248 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1248 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1249 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1249 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1250 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1250 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1251 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1251 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1252 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1252 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1253 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1253 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1254 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1254 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1255 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1255 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1256 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1256 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1257 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1257 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1258 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1258 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1259 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1259 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1260
1260
1261 if ui.verbose:
1261 if ui.verbose:
1262 fm.plain(
1262 fm.plain(
1263 b"common heads: %s\n"
1263 b"common heads: %s\n"
1264 % b" ".join(sorted(short(n) for n in heads_common))
1264 % b" ".join(sorted(short(n) for n in heads_common))
1265 )
1265 )
1266 fm.end()
1266 fm.end()
1267
1267
1268
1268
1269 _chunksize = 4 << 10
1269 _chunksize = 4 << 10
1270
1270
1271
1271
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is opened through Mercurial's own url
    handling (proxy/auth configuration) and streamed in ``_chunksize``
    pieces either to the ui (default) or to the file named by
    ``--output``.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so arbitrarily large downloads
        # never need to fit in memory at once
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always release the url handle (previously leaked); only close
        # `dest` when we opened it ourselves — never close the ui
        fh.close()
        if output:
            dest.close()
1294
1294
1295
1295
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    active = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for name, module in sorted(active, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)
        # figure out where the extension was loaded from, when knowable
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source = pycompat.sysexecutable
        if internal:
            testedwith = []  # never expose magic string to users
        else:
            testedwith = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # annotate the name with a compatibility marker in the
            # terse (non-quiet, non-verbose) listing
            if internal or hgver in testedwith:
                fm.plain(b'\n')
            elif not testedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % testedwith[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and testedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(testedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1357
1357
1358
1358
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # translation pipeline: the raw parse tree is analyzed, then
    # optimized; each named stage can be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stages to print: explicit --show-stage selection, or just
    # the parse tree under plain --verbose (deprecated spelling)
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after any requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested
    # against: every revision with --all-files, otherwise the chosen
    # revision (or the working directory when no revision is given)
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # build the matcher from the original expression and print every
    # candidate file it selects
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1454
1454
1455
1455
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that hasn't been written yet
    (as of 5.9rc0).

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # writing a report is exclusive with both reading one back and with
    # --dry-run; reject contradictory combinations up front
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        raise error.Abort(
            _(b"can only repair revlogv1 repositories, v2 is not affected")
        )

    # the heavy lifting (scan + rewrite) lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1528
1528
1529
1529
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but never narrower than the header
    width = max(len(fv.name) for fv in upgrade.allformatvariant)
    width = max(len(b'format-variant'), width)

    def padded(name):
        # "<name>:" left-aligned in a column of `width` characters
        return b'%s:' + (b' ' * (width - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def render(value):
            # pass byte strings through untouched, map booleans to yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        render = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (width - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')

    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color the name/repo cells according to how the repository's
        # actual format compares to config and to Mercurial's default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', padded(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', render(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            render(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            render(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1600
1600
1601
1601
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # uniform rendering for the boolean filesystem probes below
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probing case sensitivity needs a scratch file inside `path`;
        # if that cannot be created we just report "(unknown)"
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1624
1624
1625
1625
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # Assemble keyword arguments for the wire-protocol getbundle() call.
    getbundleargs = {
        # TODO: get desired bundlecaps from command line.
        'bundlecaps': None,
    }
    if common:
        getbundleargs['common'] = [bin(s) for s in common]
    if head:
        getbundleargs['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **getbundleargs)

    # Map the user-facing --type value to the on-disk bundle format name.
    knowntypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = knowntypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1672
1672
1673
1673
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        # `matched` is the ignored path (the file itself or a containing
        # directory); `matchdata` is (ignorefile, lineno, line) of the rule.
        matched = None
        matchdata = None
        if nf != b'.':
            if ignore(nf):
                matched = nf
                matchdata = repo.dirstate._ignorefileandline(nf)
            else:
                # The file itself is not ignored; check its parent dirs.
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        matched = p
                        matchdata = repo.dirstate._ignorefileandline(p)
                        break
        if not matched:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if matched == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), matched)
            )
        ignorefile, lineno, line = matchdata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1722
1722
1723
1723
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # Full node hashes with --debug, short forms otherwise.
    shortfn = hex if ui.debugflag else short

    # Width of a rendered node id, so the header columns line up.
    # Defaults to 12 when the store is empty.
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1763
1763
1764
1764
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        # Emit one parent->child edge per real parent; a null second
        # parent means this revision is not a merge.
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1783
1783
1784
1784
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Poke the changelog so the index is fully loaded before inspection.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Only the native (C/Rust) index implementations expose stats().
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1794
1794
1795
1795
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Count of problems found; doubles as the command's exit code.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        # Verify the configured encoding is one Python actually knows about.
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library. In an "oxidized" (PyOxidizer)
    # build the stdlib is embedded in the executable, so report that path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # Probe for the optional Rust extension module; its absence is reported
    # later but is not an error.
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # Collect the TLS capabilities of this Python build.
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    # The version string may carry a "+" suffix for local/custom builds;
    # report the base version and the extra part separately.
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same oxidized-vs-filesystem detection as for the Python lib above.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # If the module policy says compiled extensions should be present, try
    # importing them and report any failure as a problem.
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # Compression engines: registered, actually available, and usable over
    # the wire protocol (server side).
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    # Optional re2 regexp engine (a performance feature, not a requirement).
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    # `p` is reused as the "templates are OK" flag: it is cleared when the
    # default map file is missing or fails to parse.
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    # Only the first shell word of the editor setting is looked up in PATH;
    # the rest may be arguments.
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the built-in default, so a missing 'vi' gets a distinct
    # "no editor set" message rather than "can't find editor".
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Let extensions contribute their own install checks; each handler
    # returns the number of problems it found.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2094
2094
2095
2095
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One character per queried id: '1' for known, '0' for unknown.
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2109
2109
2110
2110
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias: all the work happens in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2115
2115
2116
2116
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: remove the lock file(s) unconditionally and exit.
    # Dangerous because it ignores whoever may legitimately hold the lock.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) without waiting (False) and
    # hold them until the user answers the prompt; the finally block
    # guarantees release even on interruption.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode (default): show the state of each lock.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock ourselves, so nobody holds it; drop it
            # again immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # Lock contents are "host:pid"; only show the host when the
                # lock was taken on a different machine.
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the probe and
                # the stat -- treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    # Non-zero exit status when any lock is held.
    return held
2228
2228
2229
2229
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Resolve the manifest storage's fulltext cache, aborting with a
        # user-facing message when the active revlog implementation does
        # not expose one (e.g. alternative storage backends).
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # --clear: drop both the in-memory and the persisted cache data.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        # --add NODE...: look each node up in the manifest store and read
        # it, which populates the fulltext cache as a side effect.
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No action flag: display the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2303
2303
2304
2304
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # In verbose mode, report which on-disk merge-state format will be
        # used before dumping the state itself.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template: commits, then per-file state (with either
        # merge-record fields or rename-record fields), then extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the two sides of the merge, with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file in the merge state.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # Regular merge record: positional fields hold the local,
                # ancestor and other versions of the file.
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # Path-conflict record: only the renamed path and side.
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": extras for files not present in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2412
2412
2413
2413
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branches get special treatment below because historically only open
    # branches were listed; every other namespace is taken wholesale.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No argument means "complete everything" (empty prefix).
    prefixes = args if args else [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2436
2436
2437
2437
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # --dump-new: serialize a fresh nodemap from the (unfiltered)
        # changelog index and write the binary blob to stdout.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # Index implementations (e.g. compiled ones) may produce the
            # persistent blob directly.
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # --dump-disk: emit the nodemap data as currently persisted,
        # without regenerating anything.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # --check: validate the persisted nodemap against the index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # --metadata: print the docket fields describing the on-disk data.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2499
2499
2500
2500
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hexadecimal node identifier into binary form,
        # rejecting anything of the wrong length with a user-facing error.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete INDEX...: remove the markers at the given indices.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            # Deleting markers rewrites the obsstore; this cannot be done
            # while a transaction could still append to it.
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting PRECURSOR in favor of
        # the given SUCCESSORS (possibly none, i.e. pruning).
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent information can only be recorded for
                    # changesets actually present in the repository.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the full marker list, so iterate
            # over everything and filter down to the --rev selection.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2650
2650
2651
2651
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    # Resolve --rev (default: working directory parent) to a context.
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2664
2664
2665
2665
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # BUG FIX: this function was previously (mis)named debugp1copies,
    # which shadowed the real debugp1copies defined just above at module
    # level. The registered command name (b'debugp2copies'), the
    # docstring and the p2copies() call all show this is the p2 variant,
    # so the function is renamed to match.
    opts = pycompat.byteskwargs(opts)
    # Resolve --rev (default: working directory parent) to a context.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2678
2678
2679
2679
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) of dirstate entries under *path* whose
        # dirstate state letter is one of *acceptable*.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repository-relative, with '/' separators as used
        # by the dirstate (on platforms where os.sep differs).
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator so only
                # the next segment is offered as a completion.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags;
    # no flag at all means "accept everything" (b'nmar' below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2748
2748
2749
2749
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then restrict the copy trace to the
    # requested file patterns (matched against the first revision).
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copymap = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2763
2763
2764
2764
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is unconditionally enabled here; the extra
    # output only shows up when --debug is also in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the connection, even if probing the peer failed.
        peer.close()
2788
2788
2789
2789
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes precedence over everything else; mirror it into the
    # ui.forcemerge override that filemerge consults.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (with -v) the other two early-decision sources: the
        # HGMERGE environment variable and the ui.merge config knob.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        # Walk the files of the requested (or working-parent) context
        # that match the given patterns.
        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Unless --debug was given, suppress the chatter _picktool
            # emits (warnings, prompts) so only the FILE = TOOL lines
            # remain; with --debug, use a no-op context manager so that
            # output passes through.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),  # whether the file is a symlink
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2874
2874
2875
2875
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # sorted by key, with control characters escaped.
            listing = target.listkeys(namespace)
            for name, value in sorted(pycompat.iteritems(listing)):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(name), stringutil.escapestr(value))
                )
        else:
            # Update mode: attempt the old -> new transition for the key.
            key, old, new = keyinfo
            request = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with target.commandexecutor() as executor:
                outcome = executor.callcommand(b'pushkey', request).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            # Exit status 0 on success (truthy outcome), 1 otherwise.
            return not outcome
    finally:
        target.close()
2911
2911
2912
2912
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Build the pvec for each of the two revisions being compared.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    va = pvec.ctxpvec(ca)
    vb = pvec.ctxpvec(cb)
    # Classify the relationship: equal, ancestor (> / <), or unrelated (|).
    if va == vb:
        rel = b"="
    elif va > vb:
        rel = b">"
    elif va < vb:
        rel = b"<"
    elif va | vb:
        # NOTE(review): if none of these comparisons holds, ``rel`` is
        # never bound and the final write raises; presumably the pvec
        # operators cover all cases — confirm against the pvec module.
        rel = b"|"
    ui.write(_(b"a: %s\n") % va)
    ui.write(_(b"b: %s\n") % vb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (va._depth, vb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(va._depth - vb._depth),
            pvec._hamming(va._vec, vb._vec),
            va.distance(vb),
            rel,
        )
    )
2939
2939
2940
2940
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None asks rebuild() to reset everything; --minimal narrows the
        # set down to only the inconsistent entries (see docstring).
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Files the manifest tracks but the dirstate lost...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not pending adds.
            notadded = {
                f for f in indirstate - inmanifest if dirstate[f] != b'a'
            }
            changedfiles = manifestonly | notadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2988
2988
2989
2989
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # --only-data restricts the scan to .d files, a much cheaper check.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2994
3006
2995
3007
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        filectx = ctx[path]
        # renamed() returns (source path, source filenode) or a falsy
        # value when the file was not copied/renamed.
        renameinfo = filectx.filelog().renamed(filectx.filenode())
        display = repo.pathto(path)
        if not renameinfo:
            ui.write(_(b"%s not renamed\n") % display)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (display, renameinfo[0], hex(renameinfo[1]))
            )
3015
3027
3016
3028
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3022
3034
3023
3035
3024 @command(
3036 @command(
3025 b'debugrevlog',
3037 b'debugrevlog',
3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3038 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3027 _(b'-c|-m|FILE'),
3039 _(b'-c|-m|FILE'),
3028 optionalrepo=True,
3040 optionalrepo=True,
3029 )
3041 )
3030 def debugrevlog(ui, repo, file_=None, **opts):
3042 def debugrevlog(ui, repo, file_=None, **opts):
3031 """show data and statistics about a revlog"""
3043 """show data and statistics about a revlog"""
3032 opts = pycompat.byteskwargs(opts)
3044 opts = pycompat.byteskwargs(opts)
3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3045 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3034
3046
3035 if opts.get(b"dump"):
3047 if opts.get(b"dump"):
3036 numrevs = len(r)
3048 numrevs = len(r)
3037 ui.write(
3049 ui.write(
3038 (
3050 (
3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3051 b"# rev p1rev p2rev start end deltastart base p1 p2"
3040 b" rawsize totalsize compression heads chainlen\n"
3052 b" rawsize totalsize compression heads chainlen\n"
3041 )
3053 )
3042 )
3054 )
3043 ts = 0
3055 ts = 0
3044 heads = set()
3056 heads = set()
3045
3057
3046 for rev in pycompat.xrange(numrevs):
3058 for rev in pycompat.xrange(numrevs):
3047 dbase = r.deltaparent(rev)
3059 dbase = r.deltaparent(rev)
3048 if dbase == -1:
3060 if dbase == -1:
3049 dbase = rev
3061 dbase = rev
3050 cbase = r.chainbase(rev)
3062 cbase = r.chainbase(rev)
3051 clen = r.chainlen(rev)
3063 clen = r.chainlen(rev)
3052 p1, p2 = r.parentrevs(rev)
3064 p1, p2 = r.parentrevs(rev)
3053 rs = r.rawsize(rev)
3065 rs = r.rawsize(rev)
3054 ts = ts + rs
3066 ts = ts + rs
3055 heads -= set(r.parentrevs(rev))
3067 heads -= set(r.parentrevs(rev))
3056 heads.add(rev)
3068 heads.add(rev)
3057 try:
3069 try:
3058 compression = ts / r.end(rev)
3070 compression = ts / r.end(rev)
3059 except ZeroDivisionError:
3071 except ZeroDivisionError:
3060 compression = 0
3072 compression = 0
3061 ui.write(
3073 ui.write(
3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3074 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3063 b"%11d %5d %8d\n"
3075 b"%11d %5d %8d\n"
3064 % (
3076 % (
3065 rev,
3077 rev,
3066 p1,
3078 p1,
3067 p2,
3079 p2,
3068 r.start(rev),
3080 r.start(rev),
3069 r.end(rev),
3081 r.end(rev),
3070 r.start(dbase),
3082 r.start(dbase),
3071 r.start(cbase),
3083 r.start(cbase),
3072 r.start(p1),
3084 r.start(p1),
3073 r.start(p2),
3085 r.start(p2),
3074 rs,
3086 rs,
3075 ts,
3087 ts,
3076 compression,
3088 compression,
3077 len(heads),
3089 len(heads),
3078 clen,
3090 clen,
3079 )
3091 )
3080 )
3092 )
3081 return 0
3093 return 0
3082
3094
3083 format = r._format_version
3095 format = r._format_version
3084 v = r._format_flags
3096 v = r._format_flags
3085 flags = []
3097 flags = []
3086 gdelta = False
3098 gdelta = False
3087 if v & revlog.FLAG_INLINE_DATA:
3099 if v & revlog.FLAG_INLINE_DATA:
3088 flags.append(b'inline')
3100 flags.append(b'inline')
3089 if v & revlog.FLAG_GENERALDELTA:
3101 if v & revlog.FLAG_GENERALDELTA:
3090 gdelta = True
3102 gdelta = True
3091 flags.append(b'generaldelta')
3103 flags.append(b'generaldelta')
3092 if not flags:
3104 if not flags:
3093 flags = [b'(none)']
3105 flags = [b'(none)']
3094
3106
3095 ### tracks merge vs single parent
3107 ### tracks merge vs single parent
3096 nummerges = 0
3108 nummerges = 0
3097
3109
3098 ### tracks ways the "delta" are build
3110 ### tracks ways the "delta" are build
3099 # nodelta
3111 # nodelta
3100 numempty = 0
3112 numempty = 0
3101 numemptytext = 0
3113 numemptytext = 0
3102 numemptydelta = 0
3114 numemptydelta = 0
3103 # full file content
3115 # full file content
3104 numfull = 0
3116 numfull = 0
3105 # intermediate snapshot against a prior snapshot
3117 # intermediate snapshot against a prior snapshot
3106 numsemi = 0
3118 numsemi = 0
3107 # snapshot count per depth
3119 # snapshot count per depth
3108 numsnapdepth = collections.defaultdict(lambda: 0)
3120 numsnapdepth = collections.defaultdict(lambda: 0)
3109 # delta against previous revision
3121 # delta against previous revision
3110 numprev = 0
3122 numprev = 0
3111 # delta against first or second parent (not prev)
3123 # delta against first or second parent (not prev)
3112 nump1 = 0
3124 nump1 = 0
3113 nump2 = 0
3125 nump2 = 0
3114 # delta against neither prev nor parents
3126 # delta against neither prev nor parents
3115 numother = 0
3127 numother = 0
3116 # delta against prev that are also first or second parent
3128 # delta against prev that are also first or second parent
3117 # (details of `numprev`)
3129 # (details of `numprev`)
3118 nump1prev = 0
3130 nump1prev = 0
3119 nump2prev = 0
3131 nump2prev = 0
3120
3132
3121 # data about delta chain of each revs
3133 # data about delta chain of each revs
3122 chainlengths = []
3134 chainlengths = []
3123 chainbases = []
3135 chainbases = []
3124 chainspans = []
3136 chainspans = []
3125
3137
3126 # data about each revision
3138 # data about each revision
3127 datasize = [None, 0, 0]
3139 datasize = [None, 0, 0]
3128 fullsize = [None, 0, 0]
3140 fullsize = [None, 0, 0]
3129 semisize = [None, 0, 0]
3141 semisize = [None, 0, 0]
3130 # snapshot count per depth
3142 # snapshot count per depth
3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3143 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3132 deltasize = [None, 0, 0]
3144 deltasize = [None, 0, 0]
3133 chunktypecounts = {}
3145 chunktypecounts = {}
3134 chunktypesizes = {}
3146 chunktypesizes = {}
3135
3147
3136 def addsize(size, l):
3148 def addsize(size, l):
3137 if l[0] is None or size < l[0]:
3149 if l[0] is None or size < l[0]:
3138 l[0] = size
3150 l[0] = size
3139 if size > l[1]:
3151 if size > l[1]:
3140 l[1] = size
3152 l[1] = size
3141 l[2] += size
3153 l[2] += size
3142
3154
3143 numrevs = len(r)
3155 numrevs = len(r)
3144 for rev in pycompat.xrange(numrevs):
3156 for rev in pycompat.xrange(numrevs):
3145 p1, p2 = r.parentrevs(rev)
3157 p1, p2 = r.parentrevs(rev)
3146 delta = r.deltaparent(rev)
3158 delta = r.deltaparent(rev)
3147 if format > 0:
3159 if format > 0:
3148 addsize(r.rawsize(rev), datasize)
3160 addsize(r.rawsize(rev), datasize)
3149 if p2 != nullrev:
3161 if p2 != nullrev:
3150 nummerges += 1
3162 nummerges += 1
3151 size = r.length(rev)
3163 size = r.length(rev)
3152 if delta == nullrev:
3164 if delta == nullrev:
3153 chainlengths.append(0)
3165 chainlengths.append(0)
3154 chainbases.append(r.start(rev))
3166 chainbases.append(r.start(rev))
3155 chainspans.append(size)
3167 chainspans.append(size)
3156 if size == 0:
3168 if size == 0:
3157 numempty += 1
3169 numempty += 1
3158 numemptytext += 1
3170 numemptytext += 1
3159 else:
3171 else:
3160 numfull += 1
3172 numfull += 1
3161 numsnapdepth[0] += 1
3173 numsnapdepth[0] += 1
3162 addsize(size, fullsize)
3174 addsize(size, fullsize)
3163 addsize(size, snapsizedepth[0])
3175 addsize(size, snapsizedepth[0])
3164 else:
3176 else:
3165 chainlengths.append(chainlengths[delta] + 1)
3177 chainlengths.append(chainlengths[delta] + 1)
3166 baseaddr = chainbases[delta]
3178 baseaddr = chainbases[delta]
3167 revaddr = r.start(rev)
3179 revaddr = r.start(rev)
3168 chainbases.append(baseaddr)
3180 chainbases.append(baseaddr)
3169 chainspans.append((revaddr - baseaddr) + size)
3181 chainspans.append((revaddr - baseaddr) + size)
3170 if size == 0:
3182 if size == 0:
3171 numempty += 1
3183 numempty += 1
3172 numemptydelta += 1
3184 numemptydelta += 1
3173 elif r.issnapshot(rev):
3185 elif r.issnapshot(rev):
3174 addsize(size, semisize)
3186 addsize(size, semisize)
3175 numsemi += 1
3187 numsemi += 1
3176 depth = r.snapshotdepth(rev)
3188 depth = r.snapshotdepth(rev)
3177 numsnapdepth[depth] += 1
3189 numsnapdepth[depth] += 1
3178 addsize(size, snapsizedepth[depth])
3190 addsize(size, snapsizedepth[depth])
3179 else:
3191 else:
3180 addsize(size, deltasize)
3192 addsize(size, deltasize)
3181 if delta == rev - 1:
3193 if delta == rev - 1:
3182 numprev += 1
3194 numprev += 1
3183 if delta == p1:
3195 if delta == p1:
3184 nump1prev += 1
3196 nump1prev += 1
3185 elif delta == p2:
3197 elif delta == p2:
3186 nump2prev += 1
3198 nump2prev += 1
3187 elif delta == p1:
3199 elif delta == p1:
3188 nump1 += 1
3200 nump1 += 1
3189 elif delta == p2:
3201 elif delta == p2:
3190 nump2 += 1
3202 nump2 += 1
3191 elif delta != nullrev:
3203 elif delta != nullrev:
3192 numother += 1
3204 numother += 1
3193
3205
3194 # Obtain data on the raw chunks in the revlog.
3206 # Obtain data on the raw chunks in the revlog.
3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3207 if util.safehasattr(r, b'_getsegmentforrevs'):
3196 segment = r._getsegmentforrevs(rev, rev)[1]
3208 segment = r._getsegmentforrevs(rev, rev)[1]
3197 else:
3209 else:
3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3210 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3199 if segment:
3211 if segment:
3200 chunktype = bytes(segment[0:1])
3212 chunktype = bytes(segment[0:1])
3201 else:
3213 else:
3202 chunktype = b'empty'
3214 chunktype = b'empty'
3203
3215
3204 if chunktype not in chunktypecounts:
3216 if chunktype not in chunktypecounts:
3205 chunktypecounts[chunktype] = 0
3217 chunktypecounts[chunktype] = 0
3206 chunktypesizes[chunktype] = 0
3218 chunktypesizes[chunktype] = 0
3207
3219
3208 chunktypecounts[chunktype] += 1
3220 chunktypecounts[chunktype] += 1
3209 chunktypesizes[chunktype] += size
3221 chunktypesizes[chunktype] += size
3210
3222
3211 # Adjust size min value for empty cases
3223 # Adjust size min value for empty cases
3212 for size in (datasize, fullsize, semisize, deltasize):
3224 for size in (datasize, fullsize, semisize, deltasize):
3213 if size[0] is None:
3225 if size[0] is None:
3214 size[0] = 0
3226 size[0] = 0
3215
3227
3216 numdeltas = numrevs - numfull - numempty - numsemi
3228 numdeltas = numrevs - numfull - numempty - numsemi
3217 numoprev = numprev - nump1prev - nump2prev
3229 numoprev = numprev - nump1prev - nump2prev
3218 totalrawsize = datasize[2]
3230 totalrawsize = datasize[2]
3219 datasize[2] /= numrevs
3231 datasize[2] /= numrevs
3220 fulltotal = fullsize[2]
3232 fulltotal = fullsize[2]
3221 if numfull == 0:
3233 if numfull == 0:
3222 fullsize[2] = 0
3234 fullsize[2] = 0
3223 else:
3235 else:
3224 fullsize[2] /= numfull
3236 fullsize[2] /= numfull
3225 semitotal = semisize[2]
3237 semitotal = semisize[2]
3226 snaptotal = {}
3238 snaptotal = {}
3227 if numsemi > 0:
3239 if numsemi > 0:
3228 semisize[2] /= numsemi
3240 semisize[2] /= numsemi
3229 for depth in snapsizedepth:
3241 for depth in snapsizedepth:
3230 snaptotal[depth] = snapsizedepth[depth][2]
3242 snaptotal[depth] = snapsizedepth[depth][2]
3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3243 snapsizedepth[depth][2] /= numsnapdepth[depth]
3232
3244
3233 deltatotal = deltasize[2]
3245 deltatotal = deltasize[2]
3234 if numdeltas > 0:
3246 if numdeltas > 0:
3235 deltasize[2] /= numdeltas
3247 deltasize[2] /= numdeltas
3236 totalsize = fulltotal + semitotal + deltatotal
3248 totalsize = fulltotal + semitotal + deltatotal
3237 avgchainlen = sum(chainlengths) / numrevs
3249 avgchainlen = sum(chainlengths) / numrevs
3238 maxchainlen = max(chainlengths)
3250 maxchainlen = max(chainlengths)
3239 maxchainspan = max(chainspans)
3251 maxchainspan = max(chainspans)
3240 compratio = 1
3252 compratio = 1
3241 if totalsize:
3253 if totalsize:
3242 compratio = totalrawsize / totalsize
3254 compratio = totalrawsize / totalsize
3243
3255
3244 basedfmtstr = b'%%%dd\n'
3256 basedfmtstr = b'%%%dd\n'
3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3257 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3246
3258
3247 def dfmtstr(max):
3259 def dfmtstr(max):
3248 return basedfmtstr % len(str(max))
3260 return basedfmtstr % len(str(max))
3249
3261
3250 def pcfmtstr(max, padding=0):
3262 def pcfmtstr(max, padding=0):
3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3263 return basepcfmtstr % (len(str(max)), b' ' * padding)
3252
3264
3253 def pcfmt(value, total):
3265 def pcfmt(value, total):
3254 if total:
3266 if total:
3255 return (value, 100 * float(value) / total)
3267 return (value, 100 * float(value) / total)
3256 else:
3268 else:
3257 return value, 100.0
3269 return value, 100.0
3258
3270
3259 ui.writenoi18n(b'format : %d\n' % format)
3271 ui.writenoi18n(b'format : %d\n' % format)
3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3272 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3261
3273
3262 ui.write(b'\n')
3274 ui.write(b'\n')
3263 fmt = pcfmtstr(totalsize)
3275 fmt = pcfmtstr(totalsize)
3264 fmt2 = dfmtstr(totalsize)
3276 fmt2 = dfmtstr(totalsize)
3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3277 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3278 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3267 ui.writenoi18n(
3279 ui.writenoi18n(
3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3280 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3269 )
3281 )
3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3282 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3283 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3272 ui.writenoi18n(
3284 ui.writenoi18n(
3273 b' text : '
3285 b' text : '
3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3286 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3275 )
3287 )
3276 ui.writenoi18n(
3288 ui.writenoi18n(
3277 b' delta : '
3289 b' delta : '
3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3290 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3279 )
3291 )
3280 ui.writenoi18n(
3292 ui.writenoi18n(
3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3293 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3282 )
3294 )
3283 for depth in sorted(numsnapdepth):
3295 for depth in sorted(numsnapdepth):
3284 ui.write(
3296 ui.write(
3285 (b' lvl-%-3d : ' % depth)
3297 (b' lvl-%-3d : ' % depth)
3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3298 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3287 )
3299 )
3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3300 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3301 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3290 ui.writenoi18n(
3302 ui.writenoi18n(
3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3303 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3292 )
3304 )
3293 for depth in sorted(numsnapdepth):
3305 for depth in sorted(numsnapdepth):
3294 ui.write(
3306 ui.write(
3295 (b' lvl-%-3d : ' % depth)
3307 (b' lvl-%-3d : ' % depth)
3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3308 + fmt % pcfmt(snaptotal[depth], totalsize)
3297 )
3309 )
3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3310 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3299
3311
3300 def fmtchunktype(chunktype):
3312 def fmtchunktype(chunktype):
3301 if chunktype == b'empty':
3313 if chunktype == b'empty':
3302 return b' %s : ' % chunktype
3314 return b' %s : ' % chunktype
3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3315 elif chunktype in pycompat.bytestr(string.ascii_letters):
3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3316 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3305 else:
3317 else:
3306 return b' 0x%s : ' % hex(chunktype)
3318 return b' 0x%s : ' % hex(chunktype)
3307
3319
3308 ui.write(b'\n')
3320 ui.write(b'\n')
3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3321 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3310 for chunktype in sorted(chunktypecounts):
3322 for chunktype in sorted(chunktypecounts):
3311 ui.write(fmtchunktype(chunktype))
3323 ui.write(fmtchunktype(chunktype))
3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3324 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3325 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3314 for chunktype in sorted(chunktypecounts):
3326 for chunktype in sorted(chunktypecounts):
3315 ui.write(fmtchunktype(chunktype))
3327 ui.write(fmtchunktype(chunktype))
3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3328 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3317
3329
3318 ui.write(b'\n')
3330 ui.write(b'\n')
3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3331 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3332 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3333 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3334 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3335 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3324
3336
3325 if format > 0:
3337 if format > 0:
3326 ui.write(b'\n')
3338 ui.write(b'\n')
3327 ui.writenoi18n(
3339 ui.writenoi18n(
3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3340 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3329 % tuple(datasize)
3341 % tuple(datasize)
3330 )
3342 )
3331 ui.writenoi18n(
3343 ui.writenoi18n(
3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3344 b'full revision size (min/max/avg) : %d / %d / %d\n'
3333 % tuple(fullsize)
3345 % tuple(fullsize)
3334 )
3346 )
3335 ui.writenoi18n(
3347 ui.writenoi18n(
3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3348 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3337 % tuple(semisize)
3349 % tuple(semisize)
3338 )
3350 )
3339 for depth in sorted(snapsizedepth):
3351 for depth in sorted(snapsizedepth):
3340 if depth == 0:
3352 if depth == 0:
3341 continue
3353 continue
3342 ui.writenoi18n(
3354 ui.writenoi18n(
3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3355 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3344 % ((depth,) + tuple(snapsizedepth[depth]))
3356 % ((depth,) + tuple(snapsizedepth[depth]))
3345 )
3357 )
3346 ui.writenoi18n(
3358 ui.writenoi18n(
3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3359 b'delta size (min/max/avg) : %d / %d / %d\n'
3348 % tuple(deltasize)
3360 % tuple(deltasize)
3349 )
3361 )
3350
3362
3351 if numdeltas > 0:
3363 if numdeltas > 0:
3352 ui.write(b'\n')
3364 ui.write(b'\n')
3353 fmt = pcfmtstr(numdeltas)
3365 fmt = pcfmtstr(numdeltas)
3354 fmt2 = pcfmtstr(numdeltas, 4)
3366 fmt2 = pcfmtstr(numdeltas, 4)
3355 ui.writenoi18n(
3367 ui.writenoi18n(
3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3368 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3357 )
3369 )
3358 if numprev > 0:
3370 if numprev > 0:
3359 ui.writenoi18n(
3371 ui.writenoi18n(
3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3372 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3361 )
3373 )
3362 ui.writenoi18n(
3374 ui.writenoi18n(
3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3375 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3364 )
3376 )
3365 ui.writenoi18n(
3377 ui.writenoi18n(
3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3378 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3367 )
3379 )
3368 if gdelta:
3380 if gdelta:
3369 ui.writenoi18n(
3381 ui.writenoi18n(
3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3382 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3371 )
3383 )
3372 ui.writenoi18n(
3384 ui.writenoi18n(
3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3385 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3374 )
3386 )
3375 ui.writenoi18n(
3387 ui.writenoi18n(
3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3388 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3377 )
3389 )
3378
3390
3379
3391
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Two output layouts are supported: 0 (legacy, node-based parents) and
    # 1 (includes flags/size, rev-number-based parents).
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # All node ids render at the same width; measure the first one only.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision in index order.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the node can't be resolved
                # (e.g. a corrupt index entry); keep dumping the rest.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3505
3494
3506
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parse pipeline, in order; each stage transforms the tree produced
    # by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimizing) stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stage trees to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the pre- and post-optimization trees and diff the
        # resulting revision sequences.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-style diff of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        # Non-zero exit: optimized result differed (see docstring).
        return 1

    # Normal path: evaluate the final tree and print the result set.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3626
3638
3627
3639
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive sinks for I/O logging.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        # Unbuffered append so log records survive abrupt termination.
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Blocks until the client closes the connection.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3676
3688
3677
3689
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of these people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision, i.e. "no second parent".
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3705
3717
3706
3718
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not a
    # file, so shift it over.
    # Fix: error messages previously named b'debugdata' (copy-paste from the
    # debugdata command), which mislabeled usage errors for this command.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the raw revlog when the storage object is a wrapper.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for stable, comparable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3733
3745
3734
3746
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # Fall back to the repository's configured default path.
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    # Imported lazily: only available/needed on Windows.
    from . import win32

    # Verification is disabled on purpose: we only want the peer's raw
    # certificate to hand to the Windows chain-building APIs.
    # NOTE(review): ssl.wrap_socket is deprecated and removed in Python 3.12;
    # an SSLContext-based replacement would be needed there — confirm the
    # supported Python range before changing.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form requested with True).
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First check without building; only hit Windows Update if incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3806
3818
3807
3819
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, most recent first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Dummy values consumed by bundlerepo.getremotechanges() below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            # two real parents -> merge changeset
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Unreadable bundle (e.g. its parent was stripped): warn and move on.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Probe the bundle quietly; failures are simply skipped.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    # Only unbundle if this bundle actually contains the node.
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # legacy bundle10 path
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # stop after the first bundle containing the node
                        break
            else:
                # Listing mode: header with the bundle's mtime, then its
                # changesets (full path only with --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # compact one-line-per-changeset template
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Always release the temporary bundle repo resources.
            cleanupfn()
3948
3960
3949
3961
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepo state (path, source, pinned revision) recorded in
    # .hgsubstate at the requested revision, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3961
3973
3962
3974
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo directly in the interpreter's namespace.
    code.interact(local={'ui': ui, 'repo': repo})
3978
3990
3979
3991
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # one indented line per successors set; empty sets print as a
            # bare newline
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
4034
4046
4035
4047
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')

    def rendered(fnode):
        # Human-readable cell for the cached .hgtags filenode: hex digest
        # (flagged when the filelog lacks it), 'missing' for an absent
        # entry, or 'invalid' for anything else.
        if fnode:
            text = hex(fnode)
            if not flog.hasnode(fnode):
                text += b' (unknown node)'
            return text
        if fnode is None:
            return b'missing'
        return b'invalid'

    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        ui.write(b'%d %s %s\n' % (rev, hex(node), rendered(fnode)))
4054
4066
4055
4067
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            # split(b'=', 1) raises ValueError when '=' is absent
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                # empty key, or 'ui' which would shadow the reserved resource
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template mode: render once with the defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template mode: render the template once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4119
4131
4120
4132
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. no interactive input); substitute a
    # marker so the echoed line is always well-formed.
    response = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % response)
4135
4147
4136
4148
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() produced, for testing prompt behavior.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4149
4161
4150
4162
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # take both the working-copy and store locks before rewriting caches
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4156
4168
4157
4169
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # deduplicate requested optimizations before delegating to the upgrade
    # machinery, which does all the real work
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4207
4219
4208
4220
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display paths with '/' separators when ui.slash is set on platforms
    # whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fmtpath = util.normpath
    else:
        fmtpath = lambda fn: fn
    # Column widths sized to the longest repo-relative and cwd-relative
    # paths.  (Was `abs` in an earlier version, shadowing the builtin;
    # generator expressions also avoid building throwaway lists.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items),
    )
    for abspath in items:
        line = fmt % (
            abspath,
            fmtpath(repo.pathto(abspath)),
            # third column marks paths matched exactly (not via a pattern)
            m.exact(abspath) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4235
4247
4236
4248
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # optional prefix listing divergent nodes with their phases
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4254
4266
4255
4267
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # drop the connection-related options; only the command arguments
        # (three/four/five) should be forwarded over the wire
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4286
4298
4287
4299
4288 def _parsewirelangblocks(fh):
4300 def _parsewirelangblocks(fh):
4289 activeaction = None
4301 activeaction = None
4290 blocklines = []
4302 blocklines = []
4291 lastindent = 0
4303 lastindent = 0
4292
4304
4293 for line in fh:
4305 for line in fh:
4294 line = line.rstrip()
4306 line = line.rstrip()
4295 if not line:
4307 if not line:
4296 continue
4308 continue
4297
4309
4298 if line.startswith(b'#'):
4310 if line.startswith(b'#'):
4299 continue
4311 continue
4300
4312
4301 if not line.startswith(b' '):
4313 if not line.startswith(b' '):
4302 # New block. Flush previous one.
4314 # New block. Flush previous one.
4303 if activeaction:
4315 if activeaction:
4304 yield activeaction, blocklines
4316 yield activeaction, blocklines
4305
4317
4306 activeaction = line
4318 activeaction = line
4307 blocklines = []
4319 blocklines = []
4308 lastindent = 0
4320 lastindent = 0
4309 continue
4321 continue
4310
4322
4311 # Else we start with an indent.
4323 # Else we start with an indent.
4312
4324
4313 if not activeaction:
4325 if not activeaction:
4314 raise error.Abort(_(b'indented line outside of block'))
4326 raise error.Abort(_(b'indented line outside of block'))
4315
4327
4316 indent = len(line) - len(line.lstrip())
4328 indent = len(line) - len(line.lstrip())
4317
4329
4318 # If this line is indented more than the last line, concatenate it.
4330 # If this line is indented more than the last line, concatenate it.
4319 if indent > lastindent and blocklines:
4331 if indent > lastindent and blocklines:
4320 blocklines[-1] += line.lstrip()
4332 blocklines[-1] += line.lstrip()
4321 else:
4333 else:
4322 blocklines.append(line)
4334 blocklines.append(line)
4323 lastindent = indent
4335 lastindent = indent
4324
4336
4325 # Flush last block.
4337 # Flush last block.
4326 if activeaction:
4338 if activeaction:
4327 yield activeaction, blocklines
4339 yield activeaction, blocklines
4328
4340
4329
4341
4330 @command(
4342 @command(
4331 b'debugwireproto',
4343 b'debugwireproto',
4332 [
4344 [
4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4345 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4346 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4335 (
4347 (
4336 b'',
4348 b'',
4337 b'noreadstderr',
4349 b'noreadstderr',
4338 False,
4350 False,
4339 _(b'do not read from stderr of the remote'),
4351 _(b'do not read from stderr of the remote'),
4340 ),
4352 ),
4341 (
4353 (
4342 b'',
4354 b'',
4343 b'nologhandshake',
4355 b'nologhandshake',
4344 False,
4356 False,
4345 _(b'do not log I/O related to the peer handshake'),
4357 _(b'do not log I/O related to the peer handshake'),
4346 ),
4358 ),
4347 ]
4359 ]
4348 + cmdutil.remoteopts,
4360 + cmdutil.remoteopts,
4349 _(b'[PATH]'),
4361 _(b'[PATH]'),
4350 optionalrepo=True,
4362 optionalrepo=True,
4351 )
4363 )
4352 def debugwireproto(ui, repo, path=None, **opts):
4364 def debugwireproto(ui, repo, path=None, **opts):
4353 """send wire protocol commands to a server
4365 """send wire protocol commands to a server
4354
4366
4355 This command can be used to issue wire protocol commands to remote
4367 This command can be used to issue wire protocol commands to remote
4356 peers and to debug the raw data being exchanged.
4368 peers and to debug the raw data being exchanged.
4357
4369
4358 ``--localssh`` will start an SSH server against the current repository
4370 ``--localssh`` will start an SSH server against the current repository
4359 and connect to that. By default, the connection will perform a handshake
4371 and connect to that. By default, the connection will perform a handshake
4360 and establish an appropriate peer instance.
4372 and establish an appropriate peer instance.
4361
4373
4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4374 ``--peer`` can be used to bypass the handshake protocol and construct a
4363 peer instance using the specified class type. Valid values are ``raw``,
4375 peer instance using the specified class type. Valid values are ``raw``,
4364 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4376 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4365 raw data payloads and don't support higher-level command actions.
4377 raw data payloads and don't support higher-level command actions.
4366
4378
4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4379 ``--noreadstderr`` can be used to disable automatic reading from stderr
4368 of the peer (for SSH connections only). Disabling automatic reading of
4380 of the peer (for SSH connections only). Disabling automatic reading of
4369 stderr is useful for making output more deterministic.
4381 stderr is useful for making output more deterministic.
4370
4382
4371 Commands are issued via a mini language which is specified via stdin.
4383 Commands are issued via a mini language which is specified via stdin.
4372 The language consists of individual actions to perform. An action is
4384 The language consists of individual actions to perform. An action is
4373 defined by a block. A block is defined as a line with no leading
4385 defined by a block. A block is defined as a line with no leading
4374 space followed by 0 or more lines with leading space. Blocks are
4386 space followed by 0 or more lines with leading space. Blocks are
4375 effectively a high-level command with additional metadata.
4387 effectively a high-level command with additional metadata.
4376
4388
4377 Lines beginning with ``#`` are ignored.
4389 Lines beginning with ``#`` are ignored.
4378
4390
4379 The following sections denote available actions.
4391 The following sections denote available actions.
4380
4392
4381 raw
4393 raw
4382 ---
4394 ---
4383
4395
4384 Send raw data to the server.
4396 Send raw data to the server.
4385
4397
4386 The block payload contains the raw data to send as one atomic send
4398 The block payload contains the raw data to send as one atomic send
4387 operation. The data may not actually be delivered in a single system
4399 operation. The data may not actually be delivered in a single system
4388 call: it depends on the abilities of the transport being used.
4400 call: it depends on the abilities of the transport being used.
4389
4401
4390 Each line in the block is de-indented and concatenated. Then, that
4402 Each line in the block is de-indented and concatenated. Then, that
4391 value is evaluated as a Python b'' literal. This allows the use of
4403 value is evaluated as a Python b'' literal. This allows the use of
4392 backslash escaping, etc.
4404 backslash escaping, etc.
4393
4405
4394 raw+
4406 raw+
4395 ----
4407 ----
4396
4408
4397 Behaves like ``raw`` except flushes output afterwards.
4409 Behaves like ``raw`` except flushes output afterwards.
4398
4410
4399 command <X>
4411 command <X>
4400 -----------
4412 -----------
4401
4413
4402 Send a request to run a named command, whose name follows the ``command``
4414 Send a request to run a named command, whose name follows the ``command``
4403 string.
4415 string.
4404
4416
4405 Arguments to the command are defined as lines in this block. The format of
4417 Arguments to the command are defined as lines in this block. The format of
4406 each line is ``<key> <value>``. e.g.::
4418 each line is ``<key> <value>``. e.g.::
4407
4419
4408 command listkeys
4420 command listkeys
4409 namespace bookmarks
4421 namespace bookmarks
4410
4422
4411 If the value begins with ``eval:``, it will be interpreted as a Python
4423 If the value begins with ``eval:``, it will be interpreted as a Python
4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4424 literal expression. Otherwise values are interpreted as Python b'' literals.
4413 This allows sending complex types and encoding special byte sequences via
4425 This allows sending complex types and encoding special byte sequences via
4414 backslash escaping.
4426 backslash escaping.
4415
4427
4416 The following arguments have special meaning:
4428 The following arguments have special meaning:
4417
4429
4418 ``PUSHFILE``
4430 ``PUSHFILE``
4419 When defined, the *push* mechanism of the peer will be used instead
4431 When defined, the *push* mechanism of the peer will be used instead
4420 of the static request-response mechanism and the content of the
4432 of the static request-response mechanism and the content of the
4421 file specified in the value of this argument will be sent as the
4433 file specified in the value of this argument will be sent as the
4422 command payload.
4434 command payload.
4423
4435
4424 This can be used to submit a local bundle file to the remote.
4436 This can be used to submit a local bundle file to the remote.
4425
4437
4426 batchbegin
4438 batchbegin
4427 ----------
4439 ----------
4428
4440
4429 Instruct the peer to begin a batched send.
4441 Instruct the peer to begin a batched send.
4430
4442
4431 All ``command`` blocks are queued for execution until the next
4443 All ``command`` blocks are queued for execution until the next
4432 ``batchsubmit`` block.
4444 ``batchsubmit`` block.
4433
4445
4434 batchsubmit
4446 batchsubmit
4435 -----------
4447 -----------
4436
4448
4437 Submit previously queued ``command`` blocks as a batch request.
4449 Submit previously queued ``command`` blocks as a batch request.
4438
4450
4439 This action MUST be paired with a ``batchbegin`` action.
4451 This action MUST be paired with a ``batchbegin`` action.
4440
4452
4441 httprequest <method> <path>
4453 httprequest <method> <path>
4442 ---------------------------
4454 ---------------------------
4443
4455
4444 (HTTP peer only)
4456 (HTTP peer only)
4445
4457
4446 Send an HTTP request to the peer.
4458 Send an HTTP request to the peer.
4447
4459
4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4460 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4449
4461
4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4462 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4451 headers to add to the request. e.g. ``Accept: foo``.
4463 headers to add to the request. e.g. ``Accept: foo``.
4452
4464
4453 The following arguments are special:
4465 The following arguments are special:
4454
4466
4455 ``BODYFILE``
4467 ``BODYFILE``
4456 The content of the file defined as the value to this argument will be
4468 The content of the file defined as the value to this argument will be
4457 transferred verbatim as the HTTP request body.
4469 transferred verbatim as the HTTP request body.
4458
4470
4459 ``frame <type> <flags> <payload>``
4471 ``frame <type> <flags> <payload>``
4460 Send a unified protocol frame as part of the request body.
4472 Send a unified protocol frame as part of the request body.
4461
4473
4462 All frames will be collected and sent as the body to the HTTP
4474 All frames will be collected and sent as the body to the HTTP
4463 request.
4475 request.
4464
4476
4465 close
4477 close
4466 -----
4478 -----
4467
4479
4468 Close the connection to the server.
4480 Close the connection to the server.
4469
4481
4470 flush
4482 flush
4471 -----
4483 -----
4472
4484
4473 Flush data written to the server.
4485 Flush data written to the server.
4474
4486
4475 readavailable
4487 readavailable
4476 -------------
4488 -------------
4477
4489
4478 Close the write end of the connection and read all available data from
4490 Close the write end of the connection and read all available data from
4479 the server.
4491 the server.
4480
4492
4481 If the connection to the server encompasses multiple pipes, we poll both
4493 If the connection to the server encompasses multiple pipes, we poll both
4482 pipes and read available data.
4494 pipes and read available data.
4483
4495
4484 readline
4496 readline
4485 --------
4497 --------
4486
4498
4487 Read a line of output from the server. If there are multiple output
4499 Read a line of output from the server. If there are multiple output
4488 pipes, reads only the main pipe.
4500 pipes, reads only the main pipe.
4489
4501
4490 ereadline
4502 ereadline
4491 ---------
4503 ---------
4492
4504
4493 Like ``readline``, but read from the stderr pipe, if available.
4505 Like ``readline``, but read from the stderr pipe, if available.
4494
4506
4495 read <X>
4507 read <X>
4496 --------
4508 --------
4497
4509
4498 ``read()`` N bytes from the server's main output pipe.
4510 ``read()`` N bytes from the server's main output pipe.
4499
4511
4500 eread <X>
4512 eread <X>
4501 ---------
4513 ---------
4502
4514
4503 ``read()`` N bytes from the server's stderr pipe, if available.
4515 ``read()`` N bytes from the server's stderr pipe, if available.
4504
4516
4505 Specifying Unified Frame-Based Protocol Frames
4517 Specifying Unified Frame-Based Protocol Frames
4506 ----------------------------------------------
4518 ----------------------------------------------
4507
4519
4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4520 It is possible to emit a *Unified Frame-Based Protocol* by using special
4509 syntax.
4521 syntax.
4510
4522
4511 A frame is composed as a type, flags, and payload. These can be parsed
4523 A frame is composed as a type, flags, and payload. These can be parsed
4512 from a string of the form:
4524 from a string of the form:
4513
4525
4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4526 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4515
4527
4516 ``request-id`` and ``stream-id`` are integers defining the request and
4528 ``request-id`` and ``stream-id`` are integers defining the request and
4517 stream identifiers.
4529 stream identifiers.
4518
4530
4519 ``type`` can be an integer value for the frame type or the string name
4531 ``type`` can be an integer value for the frame type or the string name
4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4532 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4521 ``command-name``.
4533 ``command-name``.
4522
4534
4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4535 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4524 components. Each component (and there can be just one) can be an integer
4536 components. Each component (and there can be just one) can be an integer
4525 or a flag name for stream flags or frame flags, respectively. Values are
4537 or a flag name for stream flags or frame flags, respectively. Values are
4526 resolved to integers and then bitwise OR'd together.
4538 resolved to integers and then bitwise OR'd together.
4527
4539
4528 ``payload`` represents the raw frame payload. If it begins with
4540 ``payload`` represents the raw frame payload. If it begins with
4529 ``cbor:``, the following string is evaluated as Python code and the
4541 ``cbor:``, the following string is evaluated as Python code and the
4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4542 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4531 as a Python byte string literal.
4543 as a Python byte string literal.
4532 """
4544 """
4533 opts = pycompat.byteskwargs(opts)
4545 opts = pycompat.byteskwargs(opts)
4534
4546
4535 if opts[b'localssh'] and not repo:
4547 if opts[b'localssh'] and not repo:
4536 raise error.Abort(_(b'--localssh requires a repository'))
4548 raise error.Abort(_(b'--localssh requires a repository'))
4537
4549
4538 if opts[b'peer'] and opts[b'peer'] not in (
4550 if opts[b'peer'] and opts[b'peer'] not in (
4539 b'raw',
4551 b'raw',
4540 b'http2',
4552 b'http2',
4541 b'ssh1',
4553 b'ssh1',
4542 b'ssh2',
4554 b'ssh2',
4543 ):
4555 ):
4544 raise error.Abort(
4556 raise error.Abort(
4545 _(b'invalid value for --peer'),
4557 _(b'invalid value for --peer'),
4546 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4558 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4547 )
4559 )
4548
4560
4549 if path and opts[b'localssh']:
4561 if path and opts[b'localssh']:
4550 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4562 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4551
4563
4552 if ui.interactive():
4564 if ui.interactive():
4553 ui.write(_(b'(waiting for commands on stdin)\n'))
4565 ui.write(_(b'(waiting for commands on stdin)\n'))
4554
4566
4555 blocks = list(_parsewirelangblocks(ui.fin))
4567 blocks = list(_parsewirelangblocks(ui.fin))
4556
4568
4557 proc = None
4569 proc = None
4558 stdin = None
4570 stdin = None
4559 stdout = None
4571 stdout = None
4560 stderr = None
4572 stderr = None
4561 opener = None
4573 opener = None
4562
4574
4563 if opts[b'localssh']:
4575 if opts[b'localssh']:
4564 # We start the SSH server in its own process so there is process
4576 # We start the SSH server in its own process so there is process
4565 # separation. This prevents a whole class of potential bugs around
4577 # separation. This prevents a whole class of potential bugs around
4566 # shared state from interfering with server operation.
4578 # shared state from interfering with server operation.
4567 args = procutil.hgcmd() + [
4579 args = procutil.hgcmd() + [
4568 b'-R',
4580 b'-R',
4569 repo.root,
4581 repo.root,
4570 b'debugserve',
4582 b'debugserve',
4571 b'--sshstdio',
4583 b'--sshstdio',
4572 ]
4584 ]
4573 proc = subprocess.Popen(
4585 proc = subprocess.Popen(
4574 pycompat.rapply(procutil.tonativestr, args),
4586 pycompat.rapply(procutil.tonativestr, args),
4575 stdin=subprocess.PIPE,
4587 stdin=subprocess.PIPE,
4576 stdout=subprocess.PIPE,
4588 stdout=subprocess.PIPE,
4577 stderr=subprocess.PIPE,
4589 stderr=subprocess.PIPE,
4578 bufsize=0,
4590 bufsize=0,
4579 )
4591 )
4580
4592
4581 stdin = proc.stdin
4593 stdin = proc.stdin
4582 stdout = proc.stdout
4594 stdout = proc.stdout
4583 stderr = proc.stderr
4595 stderr = proc.stderr
4584
4596
4585 # We turn the pipes into observers so we can log I/O.
4597 # We turn the pipes into observers so we can log I/O.
4586 if ui.verbose or opts[b'peer'] == b'raw':
4598 if ui.verbose or opts[b'peer'] == b'raw':
4587 stdin = util.makeloggingfileobject(
4599 stdin = util.makeloggingfileobject(
4588 ui, proc.stdin, b'i', logdata=True
4600 ui, proc.stdin, b'i', logdata=True
4589 )
4601 )
4590 stdout = util.makeloggingfileobject(
4602 stdout = util.makeloggingfileobject(
4591 ui, proc.stdout, b'o', logdata=True
4603 ui, proc.stdout, b'o', logdata=True
4592 )
4604 )
4593 stderr = util.makeloggingfileobject(
4605 stderr = util.makeloggingfileobject(
4594 ui, proc.stderr, b'e', logdata=True
4606 ui, proc.stderr, b'e', logdata=True
4595 )
4607 )
4596
4608
4597 # --localssh also implies the peer connection settings.
4609 # --localssh also implies the peer connection settings.
4598
4610
4599 url = b'ssh://localserver'
4611 url = b'ssh://localserver'
4600 autoreadstderr = not opts[b'noreadstderr']
4612 autoreadstderr = not opts[b'noreadstderr']
4601
4613
4602 if opts[b'peer'] == b'ssh1':
4614 if opts[b'peer'] == b'ssh1':
4603 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4615 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4604 peer = sshpeer.sshv1peer(
4616 peer = sshpeer.sshv1peer(
4605 ui,
4617 ui,
4606 url,
4618 url,
4607 proc,
4619 proc,
4608 stdin,
4620 stdin,
4609 stdout,
4621 stdout,
4610 stderr,
4622 stderr,
4611 None,
4623 None,
4612 autoreadstderr=autoreadstderr,
4624 autoreadstderr=autoreadstderr,
4613 )
4625 )
4614 elif opts[b'peer'] == b'ssh2':
4626 elif opts[b'peer'] == b'ssh2':
4615 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4627 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4616 peer = sshpeer.sshv2peer(
4628 peer = sshpeer.sshv2peer(
4617 ui,
4629 ui,
4618 url,
4630 url,
4619 proc,
4631 proc,
4620 stdin,
4632 stdin,
4621 stdout,
4633 stdout,
4622 stderr,
4634 stderr,
4623 None,
4635 None,
4624 autoreadstderr=autoreadstderr,
4636 autoreadstderr=autoreadstderr,
4625 )
4637 )
4626 elif opts[b'peer'] == b'raw':
4638 elif opts[b'peer'] == b'raw':
4627 ui.write(_(b'using raw connection to peer\n'))
4639 ui.write(_(b'using raw connection to peer\n'))
4628 peer = None
4640 peer = None
4629 else:
4641 else:
4630 ui.write(_(b'creating ssh peer from handshake results\n'))
4642 ui.write(_(b'creating ssh peer from handshake results\n'))
4631 peer = sshpeer.makepeer(
4643 peer = sshpeer.makepeer(
4632 ui,
4644 ui,
4633 url,
4645 url,
4634 proc,
4646 proc,
4635 stdin,
4647 stdin,
4636 stdout,
4648 stdout,
4637 stderr,
4649 stderr,
4638 autoreadstderr=autoreadstderr,
4650 autoreadstderr=autoreadstderr,
4639 )
4651 )
4640
4652
4641 elif path:
4653 elif path:
4642 # We bypass hg.peer() so we can proxy the sockets.
4654 # We bypass hg.peer() so we can proxy the sockets.
4643 # TODO consider not doing this because we skip
4655 # TODO consider not doing this because we skip
4644 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4656 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4645 u = urlutil.url(path)
4657 u = urlutil.url(path)
4646 if u.scheme != b'http':
4658 if u.scheme != b'http':
4647 raise error.Abort(_(b'only http:// paths are currently supported'))
4659 raise error.Abort(_(b'only http:// paths are currently supported'))
4648
4660
4649 url, authinfo = u.authinfo()
4661 url, authinfo = u.authinfo()
4650 openerargs = {
4662 openerargs = {
4651 'useragent': b'Mercurial debugwireproto',
4663 'useragent': b'Mercurial debugwireproto',
4652 }
4664 }
4653
4665
4654 # Turn pipes/sockets into observers so we can log I/O.
4666 # Turn pipes/sockets into observers so we can log I/O.
4655 if ui.verbose:
4667 if ui.verbose:
4656 openerargs.update(
4668 openerargs.update(
4657 {
4669 {
4658 'loggingfh': ui,
4670 'loggingfh': ui,
4659 'loggingname': b's',
4671 'loggingname': b's',
4660 'loggingopts': {
4672 'loggingopts': {
4661 'logdata': True,
4673 'logdata': True,
4662 'logdataapis': False,
4674 'logdataapis': False,
4663 },
4675 },
4664 }
4676 }
4665 )
4677 )
4666
4678
4667 if ui.debugflag:
4679 if ui.debugflag:
4668 openerargs['loggingopts']['logdataapis'] = True
4680 openerargs['loggingopts']['logdataapis'] = True
4669
4681
4670 # Don't send default headers when in raw mode. This allows us to
4682 # Don't send default headers when in raw mode. This allows us to
4671 # bypass most of the behavior of our URL handling code so we can
4683 # bypass most of the behavior of our URL handling code so we can
4672 # have near complete control over what's sent on the wire.
4684 # have near complete control over what's sent on the wire.
4673 if opts[b'peer'] == b'raw':
4685 if opts[b'peer'] == b'raw':
4674 openerargs['sendaccept'] = False
4686 openerargs['sendaccept'] = False
4675
4687
4676 opener = urlmod.opener(ui, authinfo, **openerargs)
4688 opener = urlmod.opener(ui, authinfo, **openerargs)
4677
4689
4678 if opts[b'peer'] == b'http2':
4690 if opts[b'peer'] == b'http2':
4679 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4691 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4680 # We go through makepeer() because we need an API descriptor for
4692 # We go through makepeer() because we need an API descriptor for
4681 # the peer instance to be useful.
4693 # the peer instance to be useful.
4682 maybe_silent = (
4694 maybe_silent = (
4683 ui.silent()
4695 ui.silent()
4684 if opts[b'nologhandshake']
4696 if opts[b'nologhandshake']
4685 else util.nullcontextmanager()
4697 else util.nullcontextmanager()
4686 )
4698 )
4687 with maybe_silent, ui.configoverride(
4699 with maybe_silent, ui.configoverride(
4688 {(b'experimental', b'httppeer.advertise-v2'): True}
4700 {(b'experimental', b'httppeer.advertise-v2'): True}
4689 ):
4701 ):
4690 peer = httppeer.makepeer(ui, path, opener=opener)
4702 peer = httppeer.makepeer(ui, path, opener=opener)
4691
4703
4692 if not isinstance(peer, httppeer.httpv2peer):
4704 if not isinstance(peer, httppeer.httpv2peer):
4693 raise error.Abort(
4705 raise error.Abort(
4694 _(
4706 _(
4695 b'could not instantiate HTTP peer for '
4707 b'could not instantiate HTTP peer for '
4696 b'wire protocol version 2'
4708 b'wire protocol version 2'
4697 ),
4709 ),
4698 hint=_(
4710 hint=_(
4699 b'the server may not have the feature '
4711 b'the server may not have the feature '
4700 b'enabled or is not allowing this '
4712 b'enabled or is not allowing this '
4701 b'client version'
4713 b'client version'
4702 ),
4714 ),
4703 )
4715 )
4704
4716
4705 elif opts[b'peer'] == b'raw':
4717 elif opts[b'peer'] == b'raw':
4706 ui.write(_(b'using raw connection to peer\n'))
4718 ui.write(_(b'using raw connection to peer\n'))
4707 peer = None
4719 peer = None
4708 elif opts[b'peer']:
4720 elif opts[b'peer']:
4709 raise error.Abort(
4721 raise error.Abort(
4710 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4722 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4711 )
4723 )
4712 else:
4724 else:
4713 peer = httppeer.makepeer(ui, path, opener=opener)
4725 peer = httppeer.makepeer(ui, path, opener=opener)
4714
4726
4715 # We /could/ populate stdin/stdout with sock.makefile()...
4727 # We /could/ populate stdin/stdout with sock.makefile()...
4716 else:
4728 else:
4717 raise error.Abort(_(b'unsupported connection configuration'))
4729 raise error.Abort(_(b'unsupported connection configuration'))
4718
4730
4719 batchedcommands = None
4731 batchedcommands = None
4720
4732
4721 # Now perform actions based on the parsed wire language instructions.
4733 # Now perform actions based on the parsed wire language instructions.
4722 for action, lines in blocks:
4734 for action, lines in blocks:
4723 if action in (b'raw', b'raw+'):
4735 if action in (b'raw', b'raw+'):
4724 if not stdin:
4736 if not stdin:
4725 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4737 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4726
4738
4727 # Concatenate the data together.
4739 # Concatenate the data together.
4728 data = b''.join(l.lstrip() for l in lines)
4740 data = b''.join(l.lstrip() for l in lines)
4729 data = stringutil.unescapestr(data)
4741 data = stringutil.unescapestr(data)
4730 stdin.write(data)
4742 stdin.write(data)
4731
4743
4732 if action == b'raw+':
4744 if action == b'raw+':
4733 stdin.flush()
4745 stdin.flush()
4734 elif action == b'flush':
4746 elif action == b'flush':
4735 if not stdin:
4747 if not stdin:
4736 raise error.Abort(_(b'cannot call flush on this peer'))
4748 raise error.Abort(_(b'cannot call flush on this peer'))
4737 stdin.flush()
4749 stdin.flush()
4738 elif action.startswith(b'command'):
4750 elif action.startswith(b'command'):
4739 if not peer:
4751 if not peer:
4740 raise error.Abort(
4752 raise error.Abort(
4741 _(
4753 _(
4742 b'cannot send commands unless peer instance '
4754 b'cannot send commands unless peer instance '
4743 b'is available'
4755 b'is available'
4744 )
4756 )
4745 )
4757 )
4746
4758
4747 command = action.split(b' ', 1)[1]
4759 command = action.split(b' ', 1)[1]
4748
4760
4749 args = {}
4761 args = {}
4750 for line in lines:
4762 for line in lines:
4751 # We need to allow empty values.
4763 # We need to allow empty values.
4752 fields = line.lstrip().split(b' ', 1)
4764 fields = line.lstrip().split(b' ', 1)
4753 if len(fields) == 1:
4765 if len(fields) == 1:
4754 key = fields[0]
4766 key = fields[0]
4755 value = b''
4767 value = b''
4756 else:
4768 else:
4757 key, value = fields
4769 key, value = fields
4758
4770
4759 if value.startswith(b'eval:'):
4771 if value.startswith(b'eval:'):
4760 value = stringutil.evalpythonliteral(value[5:])
4772 value = stringutil.evalpythonliteral(value[5:])
4761 else:
4773 else:
4762 value = stringutil.unescapestr(value)
4774 value = stringutil.unescapestr(value)
4763
4775
4764 args[key] = value
4776 args[key] = value
4765
4777
4766 if batchedcommands is not None:
4778 if batchedcommands is not None:
4767 batchedcommands.append((command, args))
4779 batchedcommands.append((command, args))
4768 continue
4780 continue
4769
4781
4770 ui.status(_(b'sending %s command\n') % command)
4782 ui.status(_(b'sending %s command\n') % command)
4771
4783
4772 if b'PUSHFILE' in args:
4784 if b'PUSHFILE' in args:
4773 with open(args[b'PUSHFILE'], 'rb') as fh:
4785 with open(args[b'PUSHFILE'], 'rb') as fh:
4774 del args[b'PUSHFILE']
4786 del args[b'PUSHFILE']
4775 res, output = peer._callpush(
4787 res, output = peer._callpush(
4776 command, fh, **pycompat.strkwargs(args)
4788 command, fh, **pycompat.strkwargs(args)
4777 )
4789 )
4778 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4790 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4779 ui.status(
4791 ui.status(
4780 _(b'remote output: %s\n') % stringutil.escapestr(output)
4792 _(b'remote output: %s\n') % stringutil.escapestr(output)
4781 )
4793 )
4782 else:
4794 else:
4783 with peer.commandexecutor() as e:
4795 with peer.commandexecutor() as e:
4784 res = e.callcommand(command, args).result()
4796 res = e.callcommand(command, args).result()
4785
4797
4786 if isinstance(res, wireprotov2peer.commandresponse):
4798 if isinstance(res, wireprotov2peer.commandresponse):
4787 val = res.objects()
4799 val = res.objects()
4788 ui.status(
4800 ui.status(
4789 _(b'response: %s\n')
4801 _(b'response: %s\n')
4790 % stringutil.pprint(val, bprefix=True, indent=2)
4802 % stringutil.pprint(val, bprefix=True, indent=2)
4791 )
4803 )
4792 else:
4804 else:
4793 ui.status(
4805 ui.status(
4794 _(b'response: %s\n')
4806 _(b'response: %s\n')
4795 % stringutil.pprint(res, bprefix=True, indent=2)
4807 % stringutil.pprint(res, bprefix=True, indent=2)
4796 )
4808 )
4797
4809
4798 elif action == b'batchbegin':
4810 elif action == b'batchbegin':
4799 if batchedcommands is not None:
4811 if batchedcommands is not None:
4800 raise error.Abort(_(b'nested batchbegin not allowed'))
4812 raise error.Abort(_(b'nested batchbegin not allowed'))
4801
4813
4802 batchedcommands = []
4814 batchedcommands = []
4803 elif action == b'batchsubmit':
4815 elif action == b'batchsubmit':
4804 # There is a batching API we could go through. But it would be
4816 # There is a batching API we could go through. But it would be
4805 # difficult to normalize requests into function calls. It is easier
4817 # difficult to normalize requests into function calls. It is easier
4806 # to bypass this layer and normalize to commands + args.
4818 # to bypass this layer and normalize to commands + args.
4807 ui.status(
4819 ui.status(
4808 _(b'sending batch with %d sub-commands\n')
4820 _(b'sending batch with %d sub-commands\n')
4809 % len(batchedcommands)
4821 % len(batchedcommands)
4810 )
4822 )
4811 assert peer is not None
4823 assert peer is not None
4812 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4824 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4813 ui.status(
4825 ui.status(
4814 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4826 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4815 )
4827 )
4816
4828
4817 batchedcommands = None
4829 batchedcommands = None
4818
4830
4819 elif action.startswith(b'httprequest '):
4831 elif action.startswith(b'httprequest '):
4820 if not opener:
4832 if not opener:
4821 raise error.Abort(
4833 raise error.Abort(
4822 _(b'cannot use httprequest without an HTTP peer')
4834 _(b'cannot use httprequest without an HTTP peer')
4823 )
4835 )
4824
4836
4825 request = action.split(b' ', 2)
4837 request = action.split(b' ', 2)
4826 if len(request) != 3:
4838 if len(request) != 3:
4827 raise error.Abort(
4839 raise error.Abort(
4828 _(
4840 _(
4829 b'invalid httprequest: expected format is '
4841 b'invalid httprequest: expected format is '
4830 b'"httprequest <method> <path>'
4842 b'"httprequest <method> <path>'
4831 )
4843 )
4832 )
4844 )
4833
4845
4834 method, httppath = request[1:]
4846 method, httppath = request[1:]
4835 headers = {}
4847 headers = {}
4836 body = None
4848 body = None
4837 frames = []
4849 frames = []
4838 for line in lines:
4850 for line in lines:
4839 line = line.lstrip()
4851 line = line.lstrip()
4840 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4852 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4841 if m:
4853 if m:
4842 # Headers need to use native strings.
4854 # Headers need to use native strings.
4843 key = pycompat.strurl(m.group(1))
4855 key = pycompat.strurl(m.group(1))
4844 value = pycompat.strurl(m.group(2))
4856 value = pycompat.strurl(m.group(2))
4845 headers[key] = value
4857 headers[key] = value
4846 continue
4858 continue
4847
4859
4848 if line.startswith(b'BODYFILE '):
4860 if line.startswith(b'BODYFILE '):
4849 with open(line.split(b' ', 1), b'rb') as fh:
4861 with open(line.split(b' ', 1), b'rb') as fh:
4850 body = fh.read()
4862 body = fh.read()
4851 elif line.startswith(b'frame '):
4863 elif line.startswith(b'frame '):
4852 frame = wireprotoframing.makeframefromhumanstring(
4864 frame = wireprotoframing.makeframefromhumanstring(
4853 line[len(b'frame ') :]
4865 line[len(b'frame ') :]
4854 )
4866 )
4855
4867
4856 frames.append(frame)
4868 frames.append(frame)
4857 else:
4869 else:
4858 raise error.Abort(
4870 raise error.Abort(
4859 _(b'unknown argument to httprequest: %s') % line
4871 _(b'unknown argument to httprequest: %s') % line
4860 )
4872 )
4861
4873
4862 url = path + httppath
4874 url = path + httppath
4863
4875
4864 if frames:
4876 if frames:
4865 body = b''.join(bytes(f) for f in frames)
4877 body = b''.join(bytes(f) for f in frames)
4866
4878
4867 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4879 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4868
4880
4869 # urllib.Request insists on using has_data() as a proxy for
4881 # urllib.Request insists on using has_data() as a proxy for
4870 # determining the request method. Override that to use our
4882 # determining the request method. Override that to use our
4871 # explicitly requested method.
4883 # explicitly requested method.
4872 req.get_method = lambda: pycompat.sysstr(method)
4884 req.get_method = lambda: pycompat.sysstr(method)
4873
4885
4874 try:
4886 try:
4875 res = opener.open(req)
4887 res = opener.open(req)
4876 body = res.read()
4888 body = res.read()
4877 except util.urlerr.urlerror as e:
4889 except util.urlerr.urlerror as e:
4878 # read() method must be called, but only exists in Python 2
4890 # read() method must be called, but only exists in Python 2
4879 getattr(e, 'read', lambda: None)()
4891 getattr(e, 'read', lambda: None)()
4880 continue
4892 continue
4881
4893
4882 ct = res.headers.get('Content-Type')
4894 ct = res.headers.get('Content-Type')
4883 if ct == 'application/mercurial-cbor':
4895 if ct == 'application/mercurial-cbor':
4884 ui.write(
4896 ui.write(
4885 _(b'cbor> %s\n')
4897 _(b'cbor> %s\n')
4886 % stringutil.pprint(
4898 % stringutil.pprint(
4887 cborutil.decodeall(body), bprefix=True, indent=2
4899 cborutil.decodeall(body), bprefix=True, indent=2
4888 )
4900 )
4889 )
4901 )
4890
4902
4891 elif action == b'close':
4903 elif action == b'close':
4892 assert peer is not None
4904 assert peer is not None
4893 peer.close()
4905 peer.close()
4894 elif action == b'readavailable':
4906 elif action == b'readavailable':
4895 if not stdout or not stderr:
4907 if not stdout or not stderr:
4896 raise error.Abort(
4908 raise error.Abort(
4897 _(b'readavailable not available on this peer')
4909 _(b'readavailable not available on this peer')
4898 )
4910 )
4899
4911
4900 stdin.close()
4912 stdin.close()
4901 stdout.read()
4913 stdout.read()
4902 stderr.read()
4914 stderr.read()
4903
4915
4904 elif action == b'readline':
4916 elif action == b'readline':
4905 if not stdout:
4917 if not stdout:
4906 raise error.Abort(_(b'readline not available on this peer'))
4918 raise error.Abort(_(b'readline not available on this peer'))
4907 stdout.readline()
4919 stdout.readline()
4908 elif action == b'ereadline':
4920 elif action == b'ereadline':
4909 if not stderr:
4921 if not stderr:
4910 raise error.Abort(_(b'ereadline not available on this peer'))
4922 raise error.Abort(_(b'ereadline not available on this peer'))
4911 stderr.readline()
4923 stderr.readline()
4912 elif action.startswith(b'read '):
4924 elif action.startswith(b'read '):
4913 count = int(action.split(b' ', 1)[1])
4925 count = int(action.split(b' ', 1)[1])
4914 if not stdout:
4926 if not stdout:
4915 raise error.Abort(_(b'read not available on this peer'))
4927 raise error.Abort(_(b'read not available on this peer'))
4916 stdout.read(count)
4928 stdout.read(count)
4917 elif action.startswith(b'eread '):
4929 elif action.startswith(b'eread '):
4918 count = int(action.split(b' ', 1)[1])
4930 count = int(action.split(b' ', 1)[1])
4919 if not stderr:
4931 if not stderr:
4920 raise error.Abort(_(b'eread not available on this peer'))
4932 raise error.Abort(_(b'eread not available on this peer'))
4921 stderr.read(count)
4933 stderr.read(count)
4922 else:
4934 else:
4923 raise error.Abort(_(b'unknown action: %s') % action)
4935 raise error.Abort(_(b'unknown action: %s') % action)
4924
4936
4925 if batchedcommands is not None:
4937 if batchedcommands is not None:
4926 raise error.Abort(_(b'unclosed "batchbegin" request'))
4938 raise error.Abort(_(b'unclosed "batchbegin" request'))
4927
4939
4928 if peer:
4940 if peer:
4929 peer.close()
4941 peer.close()
4930
4942
4931 if proc:
4943 if proc:
4932 proc.kill()
4944 proc.kill()
@@ -1,551 +1,563 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Olivia Mackall
4 # Copyright 2007 Olivia Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 short,
16 short,
17 )
17 )
18 from . import (
18 from . import (
19 bundle2,
19 bundle2,
20 changegroup,
20 changegroup,
21 discovery,
21 discovery,
22 error,
22 error,
23 exchange,
23 exchange,
24 obsolete,
24 obsolete,
25 obsutil,
25 obsutil,
26 pathutil,
26 pathutil,
27 phases,
27 phases,
28 pycompat,
28 pycompat,
29 requirements,
29 requirements,
30 scmutil,
30 scmutil,
31 util,
31 util,
32 )
32 )
33 from .utils import (
33 from .utils import (
34 hashutil,
34 hashutil,
35 stringutil,
35 stringutil,
36 urlutil,
36 urlutil,
37 )
37 )
38
38
39
39
def backupbundle(
    repo, bases, heads, node, suffix, compress=True, obsolescence=True
):
    """create a bundle with the specified revisions as a backup

    The bundle is written into the ``strip-backup`` directory of the repo
    vfs and named from ``node``, a short hash over all bundled changesets
    (for uniqueness), and ``suffix``.  ``bases`` and ``heads`` delimit the
    revision set ``%ln::%ln`` that is bundled.  ``compress`` selects a
    compressed bundle type; ``obsolescence`` controls whether obsolescence
    markers are included (bundle2 only).  Returns the vfs-relative path of
    the written bundle file (callers in this module join it with the vfs).
    """

    backupdir = b"strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the nodes in the filename for uniqueness
    allcommits = repo.set(b'%ln::%ln', bases, heads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = hashutil.sha1(b''.join(allhashes)).digest()
    name = b"%s/%s-%s-%s.hg" % (
        backupdir,
        short(node),
        hex(totalhash[:4]),
        suffix,
    )

    # Pick the bundle container/compression from the changegroup version
    # the repo supports: HG20 for modern repos, HG10* otherwise.
    cgversion = changegroup.localversion(repo)
    comp = None
    if cgversion != b'01':
        bundletype = b"HG20"
        if compress:
            comp = b'BZ'
    elif compress:
        bundletype = b"HG10BZ"
    else:
        bundletype = b"HG10UN"

    outgoing = discovery.outgoing(repo, missingroots=bases, ancestorsof=heads)
    contentopts = {
        b'cg.version': cgversion,
        b'obsolescence': obsolescence,
        b'phases': True,
    }
    return bundle2.writenewbundle(
        repo.ui,
        repo,
        b'strip',
        name,
        bundletype,
        outgoing,
        contentopts,
        vfs,
        compression=comp,
    )
89
89
90
90
def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip"""
    # Union of every file touched by any changeset at or above striprev.
    touched = set()
    for rev in pycompat.xrange(striprev, len(repo)):
        touched |= set(repo[rev].files())
    return sorted(touched)
99
99
100
100
def _collectrevlog(revlog, striprev):
    # Linkrevs of the revlog revisions that truncating at ``striprev``
    # would break (as reported by getstrippoint).
    _strippoint, broken = revlog.getstrippoint(striprev)
    return list(map(revlog.linkrev, broken))
104
104
105
105
def _collectbrokencsets(repo, files, striprev):
    """return the changesets which will be broken by the truncation"""
    broken = set()

    # Manifest revlogs first (root manifest plus any tree manifests),
    # then one filelog per affected file.
    for mfrevlog in manifestrevlogs(repo):
        broken.update(_collectrevlog(mfrevlog, striprev))
    for fname in files:
        broken.update(_collectrevlog(repo.file(fname), striprev))

    return broken
116
116
117
117
def strip(ui, repo, nodelist, backup=True, topic=b'backup'):
    """Strip the revisions in ``nodelist`` and their descendants.

    The caller must hold the repo lock; this function opens transactions of
    its own and refuses to run inside an existing one.  ``backup`` controls
    whether a backup bundle is written first (legacy string values b'none'
    and b'strip' disable it); ``topic`` names the backup bundle.  Returns
    the backup bundle path, or None if no backup was made, so extensions
    can use it.
    """
    # This function requires the caller to lock the repo, but it operates
    # within a transaction of its own, and thus requires there to be no current
    # transaction when it is called.
    if repo.currenttransaction() is not None:
        raise error.ProgrammingError(b'cannot strip from inside a transaction')

    # Simple way to maintain backwards compatibility for this
    # argument.
    if backup in [b'none', b'strip']:
        backup = False

    repo = repo.unfiltered()
    repo.destroying()
    vfs = repo.vfs
    # load bookmark before changelog to avoid side effect from outdated
    # changelog (see repo._refreshchangelog)
    repo._bookmarks
    cl = repo.changelog

    # TODO handle undo of merge sets
    if isinstance(nodelist, bytes):
        nodelist = [nodelist]
    striplist = [cl.rev(node) for node in nodelist]
    striprev = min(striplist)

    files = _collectfiles(repo, striprev)
    saverevs = _collectbrokencsets(repo, files, striprev)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = set(striplist)
    saveheads = set(saverevs)
    for r in cl.revs(start=striprev + 1):
        if any(p in tostrip for p in cl.parentrevs(r)):
            tostrip.add(r)

        if r not in tostrip:
            saverevs.add(r)
            saveheads.difference_update(cl.parentrevs(r))
            saveheads.add(r)
    saveheads = [cl.node(r) for r in saveheads]

    # compute base nodes
    if saverevs:
        descendants = set(cl.descendants(saverevs))
        saverevs.difference_update(descendants)
    savebases = [cl.node(r) for r in saverevs]
    stripbases = [cl.node(r) for r in tostrip]

    stripobsidx = obsmarkers = ()
    if repo.ui.configbool(b'devel', b'strip-obsmarkers'):
        obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
    if obsmarkers:
        stripobsidx = [
            i for i, m in enumerate(repo.obsstore) if m in obsmarkers
        ]

    newbmtarget, updatebm = _bookmarkmovements(repo, tostrip)

    backupfile = None
    node = nodelist[-1]
    if backup:
        backupfile = _createstripbackup(repo, stripbases, node, topic)
    # create a changegroup for all the branches we need to keep
    tmpbundlefile = None
    if saveheads:
        # do not compress temporary bundle if we remove it from disk later
        #
        # We do not include obsolescence, it might re-introduce prune markers
        # we are trying to strip.  This is harmless since the stripped markers
        # are already backed up and we did not touched the markers for the
        # saved changesets.
        tmpbundlefile = backupbundle(
            repo,
            savebases,
            saveheads,
            node,
            b'temp',
            compress=False,
            obsolescence=False,
        )

    with ui.uninterruptible():
        try:
            with repo.transaction(b"strip") as tr:
                # TODO this code violates the interface abstraction of the
                # transaction and makes assumptions that file storage is
                # using append-only files. We'll need some kind of storage
                # API to handle stripping for us.
                oldfiles = set(tr._offsetmap.keys())
                oldfiles.update(tr._newfiles)

                tr.startgroup()
                cl.strip(striprev, tr)
                stripmanifest(repo, striprev, tr, files)

                for fn in files:
                    repo.file(fn).strip(striprev, tr)
                tr.endgroup()

                entries = tr.readjournal()

                # Truncate every file the transaction touched back to its
                # pre-transaction offset; files created by the transaction
                # (offset 0) are dropped from the store entirely.
                for file, troffset in entries:
                    if file in oldfiles:
                        continue
                    with repo.svfs(file, b'a', checkambig=True) as fp:
                        fp.truncate(troffset)
                    if troffset == 0:
                        repo.store.markremoved(file)

                deleteobsmarkers(repo.obsstore, stripobsidx)
                del repo.obsstore
                repo.invalidatevolatilesets()
                repo._phasecache.filterunknown(repo)

            if tmpbundlefile:
                ui.note(_(b"adding branch\n"))
                f = vfs.open(tmpbundlefile, b"rb")
                gen = exchange.readbundle(ui, f, tmpbundlefile, vfs)
                # silence internal shuffling chatter
                maybe_silent = (
                    repo.ui.silent()
                    if not repo.ui.verbose
                    else util.nullcontextmanager()
                )
                with maybe_silent:
                    tmpbundleurl = b'bundle:' + vfs.join(tmpbundlefile)
                    txnname = b'strip'
                    if not isinstance(gen, bundle2.unbundle20):
                        txnname = b"strip\n%s" % urlutil.hidepassword(
                            tmpbundleurl
                        )
                    with repo.transaction(txnname) as tr:
                        bundle2.applybundle(
                            repo, gen, tr, source=b'strip', url=tmpbundleurl
                        )
                f.close()

            with repo.transaction(b'repair') as tr:
                bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
                repo._bookmarks.applychanges(repo, tr, bmchanges)

            # remove undo files
            for undovfs, undofile in repo.undofiles():
                try:
                    undovfs.unlink(undofile)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        ui.warn(
                            _(b'error removing %s: %s\n')
                            % (
                                undovfs.join(undofile),
                                stringutil.forcebytestr(e),
                            )
                        )

        except:  # re-raises
            # Point the user at the backup/temp bundles so the stripped or
            # unrecovered changesets can be restored with 'hg unbundle'.
            if backupfile:
                ui.warn(
                    _(b"strip failed, backup bundle stored in '%s'\n")
                    % vfs.join(backupfile)
                )
            if tmpbundlefile:
                ui.warn(
                    _(b"strip failed, unrecovered changes stored in '%s'\n")
                    % vfs.join(tmpbundlefile)
                )
                ui.warn(
                    _(
                        b"(fix the problem, then recover the changesets with "
                        b"\"hg unbundle '%s'\")\n"
                    )
                    % vfs.join(tmpbundlefile)
                )
            raise
        else:
            if tmpbundlefile:
                # Remove temporary bundle only if there were no exceptions
                vfs.unlink(tmpbundlefile)

    repo.destroyed()
    # return the backup file path (or None if 'backup' was False) so
    # extensions can use it
    return backupfile
308
308
309
309
def softstrip(ui, repo, nodelist, backup=True, topic=b'backup'):
    """perform a "soft" strip using the archived phase"""
    # Everything reachable from the requested nodes is moved to the
    # archived phase rather than physically removed from disk.
    victims = [ctx.node() for ctx in repo.set(b'sort(%ln::)', nodelist)]
    if not victims:
        return None

    backupfile = None
    if backup:
        backupfile = _createstripbackup(repo, victims, victims[0], topic)

    newbmtarget, updatebm = _bookmarkmovements(repo, victims)
    with repo.transaction(b'strip') as tr:
        phases.retractboundary(repo, tr, phases.archived, victims)
        moves = [(mark, repo[newbmtarget].node()) for mark in updatebm]
        repo._bookmarks.applychanges(repo, tr, moves)
    return backupfile
327
327
328
328
def _bookmarkmovements(repo, tostrip):
    """Compute which bookmarks must move and the node they should land on.

    Returns a ``(newbmtarget, updatebm)`` pair: the list of bookmark names
    currently pointing into ``tostrip``, and the replacement target (None
    when no bookmark needs to move).
    """
    bookmarks = repo._bookmarks
    updatebm = [
        mark for mark in bookmarks if repo[bookmarks[mark]].rev() in tostrip
    ]

    newbmtarget = None
    # Only compute a target when something actually has to move.
    if updatebm:
        # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)),
        # but is much faster
        candidates = repo.revs(b'max(parents(%ld) - (%ld))', tostrip, tostrip)
        if candidates:
            newbmtarget = repo[candidates.first()].node()
        else:
            newbmtarget = b'.'
    return newbmtarget, updatebm
349
349
350
350
def _createstripbackup(repo, stripbases, node, topic):
    # backup the changeset we are about to strip
    vfs = repo.vfs
    heads = repo.changelog.heads()
    backupfile = backupbundle(repo, stripbases, heads, node, topic)
    location = vfs.join(backupfile)
    repo.ui.status(_(b"saved backup bundle to %s\n") % location)
    repo.ui.log(b"backupbundle", b"saved backup bundle to %s\n", location)
    return backupfile
361
361
362
362
def safestriproots(ui, repo, nodes):
    """return list of roots of nodes where descendants are covered by nodes"""
    torev = repo.unfiltered().changelog.rev
    wanted = {torev(n) for n in nodes}
    # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
    # orphaned = affected - wanted
    # affected = descendants(roots(wanted))
    # wanted = revs
    revset = b'%ld - ( ::( (roots(%ld):: and not _phase(%s)) -%ld) )'
    tostrip = set(repo.revs(revset, wanted, wanted, phases.internal, wanted))
    skipped = wanted - tostrip
    if skipped:
        # Warn about requested revisions we refuse to strip because a
        # descendant outside the request would be orphaned.
        joined = b', '.join(sorted(short(repo[r].node()) for r in skipped))
        ui.warn(
            _(b'warning: orphaned descendants detected, not stripping %s\n')
            % joined
        )
    return [ctx.node() for ctx in repo.set(b'roots(%ld)', tostrip)]
381
381
382
382
class stripcallback(object):
    """used as a transaction postclose callback

    Accumulates nodes via ``addnodes`` while the transaction is open and,
    when invoked on close, strips the safe roots of everything collected.
    """

    def __init__(self, ui, repo, backup, topic):
        self.ui = ui
        self.repo = repo
        self.backup = backup
        self.topic = topic if topic else b'backup'
        self.nodelist = []

    def addnodes(self, nodes):
        # Queue more nodes to be stripped when the transaction closes.
        self.nodelist += list(nodes)

    def __call__(self, tr):
        pending = safestriproots(self.ui, self.repo, self.nodelist)
        if pending:
            strip(self.ui, self.repo, pending, self.backup, self.topic)
400
400
401
401
def delayedstrip(ui, repo, nodelist, topic=None, backup=True):
    """like strip, but works inside transaction and won't strip irreverent revs

    nodelist must explicitly contain all descendants. Otherwise a warning will
    be printed that some nodes are not stripped.

    Will do a backup if `backup` is True. The last non-None "topic" will be
    used as the backup topic name. The default backup topic name is "backup".
    """
    tr = repo.currenttransaction()
    if not tr:
        # No transaction in flight: strip immediately.
        safe = safestriproots(ui, repo, nodelist)
        return strip(ui, repo, safe, backup=backup, topic=topic)

    # transaction postclose callbacks are called in alphabet order.
    # use '\xff' as prefix so we are likely to be called last.
    key = b'\xffstrip'
    callback = tr.getpostclose(key)
    if callback is None:
        callback = stripcallback(ui, repo, backup=backup, topic=topic)
        tr.addpostclose(key, callback)
    if topic:
        callback.topic = topic
    callback.addnodes(nodelist)
424
424
425
425
def stripmanifest(repo, striprev, tr, files):
    # Truncate every manifest revlog (root manifest and tree manifests).
    for mfrevlog in manifestrevlogs(repo):
        mfrevlog.strip(striprev, tr)
429
429
430
430
def manifestrevlogs(repo):
    """Yield the storage object of every manifest revlog in the repo."""
    yield repo.manifestlog.getstorage(b'')
    if not scmutil.istreemanifest(repo):
        return
    # This logic is safe if treemanifest isn't enabled, but also
    # pointless, so we skip it if treemanifest isn't enabled.
    prefix, suffix = b'meta/', b'00manifest.i'
    for t, unencoded, encoded, size in repo.store.datafiles():
        if unencoded.startswith(prefix) and unencoded.endswith(suffix):
            treedir = unencoded[len(prefix) : -len(suffix)]
            yield repo.manifestlog.getstorage(treedir)
442
442
443
443
def rebuildfncache(ui, repo, only_data=False):
    """Rebuilds the fncache file from repo history.

    Missing entries will be added. Extra entries will be removed.

    When ``only_data`` is true, the ``.i`` entries already listed in the
    fncache are trusted and only the corresponding ``.d`` entries are
    re-checked against the store, which is much cheaper than walking the
    full changelog.
    """
    repo = repo.unfiltered()

    if requirements.FNCACHE_REQUIREMENT not in repo.requirements:
        ui.warn(
            _(
                b'(not rebuilding fncache because repository does not '
                b'support fncache)\n'
            )
        )
        return

    with repo.lock():
        fnc = repo.store.fncache
        fnc.ensureloaded(warn=ui.warn)

        oldentries = set(fnc.entries)
        newentries = set()
        seenfiles = set()

        if only_data:
            # Trust the listing of .i from the fncache, but not the .d. This is
            # much faster, because we only need to stat every possible .d files,
            # instead of reading the full changelog
            for f in fnc:
                if f[:5] == b'data/' and f[-2:] == b'.i':
                    seenfiles.add(f[5:-2])
                    newentries.add(f)
                    dataf = f[:-2] + b'.d'
                    if repo.store._exists(dataf):
                        newentries.add(dataf)
        else:
            progress = ui.makeprogress(
                _(b'rebuilding'), unit=_(b'changesets'), total=len(repo)
            )
            for rev in repo:
                progress.update(rev)

                ctx = repo[rev]
                for f in ctx.files():
                    # This is to minimize I/O.
                    if f in seenfiles:
                        continue
                    seenfiles.add(f)

                    i = b'data/%s.i' % f
                    d = b'data/%s.d' % f

                    if repo.store._exists(i):
                        newentries.add(i)
                    if repo.store._exists(d):
                        newentries.add(d)

            progress.complete()

        if requirements.TREEMANIFEST_REQUIREMENT in repo.requirements:
            # This logic is safe if treemanifest isn't enabled, but also
            # pointless, so we skip it if treemanifest isn't enabled.
            for dir in pathutil.dirs(seenfiles):
                i = b'meta/%s/00manifest.i' % dir
                d = b'meta/%s/00manifest.d' % dir

                if repo.store._exists(i):
                    newentries.add(i)
                if repo.store._exists(d):
                    newentries.add(d)

        addcount = len(newentries - oldentries)
        removecount = len(oldentries - newentries)
        for p in sorted(oldentries - newentries):
            ui.write(_(b'removing %s\n') % p)
        for p in sorted(newentries - oldentries):
            ui.write(_(b'adding %s\n') % p)

        if addcount or removecount:
            ui.write(
                _(b'%d items added, %d removed from fncache\n')
                % (addcount, removecount)
            )
            fnc.entries = newentries
            fnc._dirty = True

            with repo.transaction(b'fncache') as tr:
                fnc.write(tr)
        else:
            ui.write(_(b'fncache already up to date\n'))
522
534
523
535
def deleteobsmarkers(obsstore, indices):
    """Delete some obsmarkers from obsstore and return how many were deleted

    'indices' is a list of ints which are the indices
    of the markers to be deleted.

    Every invocation of this function completely rewrites the obsstore file,
    skipping the markers we want to be removed. The new temporary file is
    created, remaining markers are written there and on .close() this file
    gets atomically renamed to obsstore, thus guaranteeing consistency."""
    if not indices:
        # we don't want to rewrite the obsstore with the same content
        return

    # Partition the existing markers into kept vs deleted by index.
    kept = []
    deleted = 0
    for idx, marker in enumerate(obsstore._all):
        if idx in indices:
            deleted += 1
        else:
            kept.append(marker)

    newobsstorefile = obsstore.svfs(b'obsstore', b'w', atomictemp=True)
    for chunk in obsolete.encodemarkers(kept, True, obsstore._version):
        newobsstorefile.write(chunk)
    newobsstorefile.close()
    return deleted
@@ -1,447 +1,447 b''
1 Show all commands except debug commands
1 Show all commands except debug commands
2 $ hg debugcomplete
2 $ hg debugcomplete
3 abort
3 abort
4 add
4 add
5 addremove
5 addremove
6 annotate
6 annotate
7 archive
7 archive
8 backout
8 backout
9 bisect
9 bisect
10 bookmarks
10 bookmarks
11 branch
11 branch
12 branches
12 branches
13 bundle
13 bundle
14 cat
14 cat
15 clone
15 clone
16 commit
16 commit
17 config
17 config
18 continue
18 continue
19 copy
19 copy
20 diff
20 diff
21 export
21 export
22 files
22 files
23 forget
23 forget
24 graft
24 graft
25 grep
25 grep
26 heads
26 heads
27 help
27 help
28 identify
28 identify
29 import
29 import
30 incoming
30 incoming
31 init
31 init
32 locate
32 locate
33 log
33 log
34 manifest
34 manifest
35 merge
35 merge
36 outgoing
36 outgoing
37 parents
37 parents
38 paths
38 paths
39 phase
39 phase
40 pull
40 pull
41 purge
41 purge
42 push
42 push
43 recover
43 recover
44 remove
44 remove
45 rename
45 rename
46 resolve
46 resolve
47 revert
47 revert
48 rollback
48 rollback
49 root
49 root
50 serve
50 serve
51 shelve
51 shelve
52 status
52 status
53 summary
53 summary
54 tag
54 tag
55 tags
55 tags
56 tip
56 tip
57 unbundle
57 unbundle
58 unshelve
58 unshelve
59 update
59 update
60 verify
60 verify
61 version
61 version
62
62
63 Show all commands that start with "a"
63 Show all commands that start with "a"
64 $ hg debugcomplete a
64 $ hg debugcomplete a
65 abort
65 abort
66 add
66 add
67 addremove
67 addremove
68 annotate
68 annotate
69 archive
69 archive
70
70
71 Do not show debug commands if there are other candidates
71 Do not show debug commands if there are other candidates
72 $ hg debugcomplete d
72 $ hg debugcomplete d
73 diff
73 diff
74
74
75 Show debug commands if there are no other candidates
75 Show debug commands if there are no other candidates
76 $ hg debugcomplete debug
76 $ hg debugcomplete debug
77 debug-repair-issue6528
77 debug-repair-issue6528
78 debugancestor
78 debugancestor
79 debugantivirusrunning
79 debugantivirusrunning
80 debugapplystreamclonebundle
80 debugapplystreamclonebundle
81 debugbackupbundle
81 debugbackupbundle
82 debugbuilddag
82 debugbuilddag
83 debugbundle
83 debugbundle
84 debugcapabilities
84 debugcapabilities
85 debugchangedfiles
85 debugchangedfiles
86 debugcheckstate
86 debugcheckstate
87 debugcolor
87 debugcolor
88 debugcommands
88 debugcommands
89 debugcomplete
89 debugcomplete
90 debugconfig
90 debugconfig
91 debugcreatestreamclonebundle
91 debugcreatestreamclonebundle
92 debugdag
92 debugdag
93 debugdata
93 debugdata
94 debugdate
94 debugdate
95 debugdeltachain
95 debugdeltachain
96 debugdirstate
96 debugdirstate
97 debugdirstateignorepatternshash
97 debugdirstateignorepatternshash
98 debugdiscovery
98 debugdiscovery
99 debugdownload
99 debugdownload
100 debugextensions
100 debugextensions
101 debugfileset
101 debugfileset
102 debugformat
102 debugformat
103 debugfsinfo
103 debugfsinfo
104 debuggetbundle
104 debuggetbundle
105 debugignore
105 debugignore
106 debugindex
106 debugindex
107 debugindexdot
107 debugindexdot
108 debugindexstats
108 debugindexstats
109 debuginstall
109 debuginstall
110 debugknown
110 debugknown
111 debuglabelcomplete
111 debuglabelcomplete
112 debuglocks
112 debuglocks
113 debugmanifestfulltextcache
113 debugmanifestfulltextcache
114 debugmergestate
114 debugmergestate
115 debugnamecomplete
115 debugnamecomplete
116 debugnodemap
116 debugnodemap
117 debugobsolete
117 debugobsolete
118 debugp1copies
118 debugp1copies
119 debugp2copies
119 debugp2copies
120 debugpathcomplete
120 debugpathcomplete
121 debugpathcopies
121 debugpathcopies
122 debugpeer
122 debugpeer
123 debugpickmergetool
123 debugpickmergetool
124 debugpushkey
124 debugpushkey
125 debugpvec
125 debugpvec
126 debugrebuilddirstate
126 debugrebuilddirstate
127 debugrebuildfncache
127 debugrebuildfncache
128 debugrename
128 debugrename
129 debugrequires
129 debugrequires
130 debugrevlog
130 debugrevlog
131 debugrevlogindex
131 debugrevlogindex
132 debugrevspec
132 debugrevspec
133 debugserve
133 debugserve
134 debugsetparents
134 debugsetparents
135 debugshell
135 debugshell
136 debugsidedata
136 debugsidedata
137 debugssl
137 debugssl
138 debugstrip
138 debugstrip
139 debugsub
139 debugsub
140 debugsuccessorssets
140 debugsuccessorssets
141 debugtagscache
141 debugtagscache
142 debugtemplate
142 debugtemplate
143 debuguigetpass
143 debuguigetpass
144 debuguiprompt
144 debuguiprompt
145 debugupdatecaches
145 debugupdatecaches
146 debugupgraderepo
146 debugupgraderepo
147 debugwalk
147 debugwalk
148 debugwhyunstable
148 debugwhyunstable
149 debugwireargs
149 debugwireargs
150 debugwireproto
150 debugwireproto
151
151
152 Do not show the alias of a debug command if there are other candidates
152 Do not show the alias of a debug command if there are other candidates
153 (this should hide rawcommit)
153 (this should hide rawcommit)
154 $ hg debugcomplete r
154 $ hg debugcomplete r
155 recover
155 recover
156 remove
156 remove
157 rename
157 rename
158 resolve
158 resolve
159 revert
159 revert
160 rollback
160 rollback
161 root
161 root
162 Show the alias of a debug command if there are no other candidates
162 Show the alias of a debug command if there are no other candidates
163 $ hg debugcomplete rawc
163 $ hg debugcomplete rawc
164
164
165
165
166 Show the global options
166 Show the global options
167 $ hg debugcomplete --options | sort
167 $ hg debugcomplete --options | sort
168 --color
168 --color
169 --config
169 --config
170 --cwd
170 --cwd
171 --debug
171 --debug
172 --debugger
172 --debugger
173 --encoding
173 --encoding
174 --encodingmode
174 --encodingmode
175 --help
175 --help
176 --hidden
176 --hidden
177 --noninteractive
177 --noninteractive
178 --pager
178 --pager
179 --profile
179 --profile
180 --quiet
180 --quiet
181 --repository
181 --repository
182 --time
182 --time
183 --traceback
183 --traceback
184 --verbose
184 --verbose
185 --version
185 --version
186 -R
186 -R
187 -h
187 -h
188 -q
188 -q
189 -v
189 -v
190 -y
190 -y
191
191
192 Show the options for the "serve" command
192 Show the options for the "serve" command
193 $ hg debugcomplete --options serve | sort
193 $ hg debugcomplete --options serve | sort
194 --accesslog
194 --accesslog
195 --address
195 --address
196 --certificate
196 --certificate
197 --cmdserver
197 --cmdserver
198 --color
198 --color
199 --config
199 --config
200 --cwd
200 --cwd
201 --daemon
201 --daemon
202 --daemon-postexec
202 --daemon-postexec
203 --debug
203 --debug
204 --debugger
204 --debugger
205 --encoding
205 --encoding
206 --encodingmode
206 --encodingmode
207 --errorlog
207 --errorlog
208 --help
208 --help
209 --hidden
209 --hidden
210 --ipv6
210 --ipv6
211 --name
211 --name
212 --noninteractive
212 --noninteractive
213 --pager
213 --pager
214 --pid-file
214 --pid-file
215 --port
215 --port
216 --prefix
216 --prefix
217 --print-url
217 --print-url
218 --profile
218 --profile
219 --quiet
219 --quiet
220 --repository
220 --repository
221 --stdio
221 --stdio
222 --style
222 --style
223 --subrepos
223 --subrepos
224 --templates
224 --templates
225 --time
225 --time
226 --traceback
226 --traceback
227 --verbose
227 --verbose
228 --version
228 --version
229 --web-conf
229 --web-conf
230 -6
230 -6
231 -A
231 -A
232 -E
232 -E
233 -R
233 -R
234 -S
234 -S
235 -a
235 -a
236 -d
236 -d
237 -h
237 -h
238 -n
238 -n
239 -p
239 -p
240 -q
240 -q
241 -t
241 -t
242 -v
242 -v
243 -y
243 -y
244
244
245 Show an error if we use --options with an ambiguous abbreviation
245 Show an error if we use --options with an ambiguous abbreviation
246 $ hg debugcomplete --options s
246 $ hg debugcomplete --options s
247 hg: command 's' is ambiguous:
247 hg: command 's' is ambiguous:
248 serve shelve showconfig status summary
248 serve shelve showconfig status summary
249 [10]
249 [10]
250
250
251 Show all commands + options
251 Show all commands + options
252 $ hg debugcommands
252 $ hg debugcommands
253 abort: dry-run
253 abort: dry-run
254 add: include, exclude, subrepos, dry-run
254 add: include, exclude, subrepos, dry-run
255 addremove: similarity, subrepos, include, exclude, dry-run
255 addremove: similarity, subrepos, include, exclude, dry-run
256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 bisect: reset, good, bad, skip, extend, command, noupdate
259 bisect: reset, good, bad, skip, extend, command, noupdate
260 bookmarks: force, rev, delete, rename, inactive, list, template
260 bookmarks: force, rev, delete, rename, inactive, list, template
261 branch: force, clean, rev
261 branch: force, clean, rev
262 branches: active, closed, rev, template
262 branches: active, closed, rev, template
263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 cat: output, rev, decode, include, exclude, template
264 cat: output, rev, decode, include, exclude, template
265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 continue: dry-run
268 continue: dry-run
269 copy: forget, after, at-rev, force, include, exclude, dry-run
269 copy: forget, after, at-rev, force, include, exclude, dry-run
270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
271 debugancestor:
271 debugancestor:
272 debugantivirusrunning:
272 debugantivirusrunning:
273 debugapplystreamclonebundle:
273 debugapplystreamclonebundle:
274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
275 debugbuilddag: mergeable-file, overwritten-file, new-file
275 debugbuilddag: mergeable-file, overwritten-file, new-file
276 debugbundle: all, part-type, spec
276 debugbundle: all, part-type, spec
277 debugcapabilities:
277 debugcapabilities:
278 debugchangedfiles: compute
278 debugchangedfiles: compute
279 debugcheckstate:
279 debugcheckstate:
280 debugcolor: style
280 debugcolor: style
281 debugcommands:
281 debugcommands:
282 debugcomplete: options
282 debugcomplete: options
283 debugcreatestreamclonebundle:
283 debugcreatestreamclonebundle:
284 debugdag: tags, branches, dots, spaces
284 debugdag: tags, branches, dots, spaces
285 debugdata: changelog, manifest, dir
285 debugdata: changelog, manifest, dir
286 debugdate: extended
286 debugdate: extended
287 debugdeltachain: changelog, manifest, dir, template
287 debugdeltachain: changelog, manifest, dir, template
288 debugdirstateignorepatternshash:
288 debugdirstateignorepatternshash:
289 debugdirstate: nodates, dates, datesort, all
289 debugdirstate: nodates, dates, datesort, all
290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
291 debugdownload: output
291 debugdownload: output
292 debugextensions: template
292 debugextensions: template
293 debugfileset: rev, all-files, show-matcher, show-stage
293 debugfileset: rev, all-files, show-matcher, show-stage
294 debugformat: template
294 debugformat: template
295 debugfsinfo:
295 debugfsinfo:
296 debuggetbundle: head, common, type
296 debuggetbundle: head, common, type
297 debugignore:
297 debugignore:
298 debugindex: changelog, manifest, dir, template
298 debugindex: changelog, manifest, dir, template
299 debugindexdot: changelog, manifest, dir
299 debugindexdot: changelog, manifest, dir
300 debugindexstats:
300 debugindexstats:
301 debuginstall: template
301 debuginstall: template
302 debugknown:
302 debugknown:
303 debuglabelcomplete:
303 debuglabelcomplete:
304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
305 debugmanifestfulltextcache: clear, add
305 debugmanifestfulltextcache: clear, add
306 debugmergestate: style, template
306 debugmergestate: style, template
307 debugnamecomplete:
307 debugnamecomplete:
308 debugnodemap: dump-new, dump-disk, check, metadata
308 debugnodemap: dump-new, dump-disk, check, metadata
309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
310 debugp1copies: rev
310 debugp1copies: rev
311 debugp2copies: rev
311 debugp2copies: rev
312 debugpathcomplete: full, normal, added, removed
312 debugpathcomplete: full, normal, added, removed
313 debugpathcopies: include, exclude
313 debugpathcopies: include, exclude
314 debugpeer:
314 debugpeer:
315 debugpickmergetool: rev, changedelete, include, exclude, tool
315 debugpickmergetool: rev, changedelete, include, exclude, tool
316 debugpushkey:
316 debugpushkey:
317 debugpvec:
317 debugpvec:
318 debugrebuilddirstate: rev, minimal
318 debugrebuilddirstate: rev, minimal
319 debugrebuildfncache:
319 debugrebuildfncache: only-data
320 debugrename: rev
320 debugrename: rev
321 debugrequires:
321 debugrequires:
322 debugrevlog: changelog, manifest, dir, dump
322 debugrevlog: changelog, manifest, dir, dump
323 debugrevlogindex: changelog, manifest, dir, format
323 debugrevlogindex: changelog, manifest, dir, format
324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
325 debugserve: sshstdio, logiofd, logiofile
325 debugserve: sshstdio, logiofd, logiofile
326 debugsetparents:
326 debugsetparents:
327 debugshell:
327 debugshell:
328 debugsidedata: changelog, manifest, dir
328 debugsidedata: changelog, manifest, dir
329 debugssl:
329 debugssl:
330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
331 debugsub: rev
331 debugsub: rev
332 debugsuccessorssets: closest
332 debugsuccessorssets: closest
333 debugtagscache:
333 debugtagscache:
334 debugtemplate: rev, define
334 debugtemplate: rev, define
335 debuguigetpass: prompt
335 debuguigetpass: prompt
336 debuguiprompt: prompt
336 debuguiprompt: prompt
337 debugupdatecaches:
337 debugupdatecaches:
338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
339 debugwalk: include, exclude
339 debugwalk: include, exclude
340 debugwhyunstable:
340 debugwhyunstable:
341 debugwireargs: three, four, five, ssh, remotecmd, insecure
341 debugwireargs: three, four, five, ssh, remotecmd, insecure
342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
345 files: rev, print0, include, exclude, template, subrepos
345 files: rev, print0, include, exclude, template, subrepos
346 forget: interactive, include, exclude, dry-run
346 forget: interactive, include, exclude, dry-run
347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
349 heads: rev, topo, active, closed, style, template
349 heads: rev, topo, active, closed, style, template
350 help: extension, command, keyword, system
350 help: extension, command, keyword, system
351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
354 init: ssh, remotecmd, insecure
354 init: ssh, remotecmd, insecure
355 locate: rev, print0, fullpath, include, exclude
355 locate: rev, print0, fullpath, include, exclude
356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
357 manifest: rev, all, template
357 manifest: rev, all, template
358 merge: force, rev, preview, abort, tool
358 merge: force, rev, preview, abort, tool
359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 parents: rev, style, template
360 parents: rev, style, template
361 paths: template
361 paths: template
362 phase: public, draft, secret, force, rev
362 phase: public, draft, secret, force, rev
363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
366 recover: verify
366 recover: verify
367 remove: after, force, subrepos, include, exclude, dry-run
367 remove: after, force, subrepos, include, exclude, dry-run
368 rename: forget, after, at-rev, force, include, exclude, dry-run
368 rename: forget, after, at-rev, force, include, exclude, dry-run
369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
371 rollback: dry-run, force
371 rollback: dry-run, force
372 root: template
372 root: template
373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
376 summary: remote
376 summary: remote
377 tag: force, local, rev, remove, edit, message, date, user
377 tag: force, local, rev, remove, edit, message, date, user
378 tags: template
378 tags: template
379 tip: patch, git, style, template
379 tip: patch, git, style, template
380 unbundle: update
380 unbundle: update
381 unshelve: abort, continue, interactive, keep, name, tool, date
381 unshelve: abort, continue, interactive, keep, name, tool, date
382 update: clean, check, merge, date, rev, tool
382 update: clean, check, merge, date, rev, tool
383 verify: full
383 verify: full
384 version: template
384 version: template
385
385
386 $ hg init a
386 $ hg init a
387 $ cd a
387 $ cd a
388 $ echo fee > fee
388 $ echo fee > fee
389 $ hg ci -q -Amfee
389 $ hg ci -q -Amfee
390 $ hg tag fee
390 $ hg tag fee
391 $ mkdir fie
391 $ mkdir fie
392 $ echo dead > fie/dead
392 $ echo dead > fie/dead
393 $ echo live > fie/live
393 $ echo live > fie/live
394 $ hg bookmark fo
394 $ hg bookmark fo
395 $ hg branch -q fie
395 $ hg branch -q fie
396 $ hg ci -q -Amfie
396 $ hg ci -q -Amfie
397 $ echo fo > fo
397 $ echo fo > fo
398 $ hg branch -qf default
398 $ hg branch -qf default
399 $ hg ci -q -Amfo
399 $ hg ci -q -Amfo
400 $ echo Fum > Fum
400 $ echo Fum > Fum
401 $ hg ci -q -AmFum
401 $ hg ci -q -AmFum
402 $ hg bookmark Fum
402 $ hg bookmark Fum
403
403
404 Test debugpathcomplete
404 Test debugpathcomplete
405
405
406 $ hg debugpathcomplete f
406 $ hg debugpathcomplete f
407 fee
407 fee
408 fie
408 fie
409 fo
409 fo
410 $ hg debugpathcomplete -f f
410 $ hg debugpathcomplete -f f
411 fee
411 fee
412 fie/dead
412 fie/dead
413 fie/live
413 fie/live
414 fo
414 fo
415
415
416 $ hg rm Fum
416 $ hg rm Fum
417 $ hg debugpathcomplete -r F
417 $ hg debugpathcomplete -r F
418 Fum
418 Fum
419
419
420 Test debugnamecomplete
420 Test debugnamecomplete
421
421
422 $ hg debugnamecomplete
422 $ hg debugnamecomplete
423 Fum
423 Fum
424 default
424 default
425 fee
425 fee
426 fie
426 fie
427 fo
427 fo
428 tip
428 tip
429 $ hg debugnamecomplete f
429 $ hg debugnamecomplete f
430 fee
430 fee
431 fie
431 fie
432 fo
432 fo
433
433
434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
435 used for completions in some shells.
435 used for completions in some shells.
436
436
437 $ hg debuglabelcomplete
437 $ hg debuglabelcomplete
438 Fum
438 Fum
439 default
439 default
440 fee
440 fee
441 fie
441 fie
442 fo
442 fo
443 tip
443 tip
444 $ hg debuglabelcomplete f
444 $ hg debuglabelcomplete f
445 fee
445 fee
446 fie
446 fie
447 fo
447 fo
@@ -1,168 +1,172 b''
1 Test correctness of revlog inline -> non-inline transition
1 Test correctness of revlog inline -> non-inline transition
2 ----------------------------------------------------------
2 ----------------------------------------------------------
3
3
4 Helper extension to intercept renames.
4 Helper extension to intercept renames.
5
5
6 $ cat > $TESTTMP/intercept_rename.py << EOF
6 $ cat > $TESTTMP/intercept_rename.py << EOF
7 > import os
7 > import os
8 > import sys
8 > import sys
9 > from mercurial import extensions, util
9 > from mercurial import extensions, util
10 >
10 >
11 > def extsetup(ui):
11 > def extsetup(ui):
12 > def close(orig, *args, **kwargs):
12 > def close(orig, *args, **kwargs):
13 > path = util.normpath(args[0]._atomictempfile__name)
13 > path = util.normpath(args[0]._atomictempfile__name)
14 > if path.endswith(b'/.hg/store/data/file.i'):
14 > if path.endswith(b'/.hg/store/data/file.i'):
15 > os._exit(80)
15 > os._exit(80)
16 > return orig(*args, **kwargs)
16 > return orig(*args, **kwargs)
17 > extensions.wrapfunction(util.atomictempfile, 'close', close)
17 > extensions.wrapfunction(util.atomictempfile, 'close', close)
18 > EOF
18 > EOF
19
19
20 Test offset computation to correctly factor in the index entries themselve.
20 Test offset computation to correctly factor in the index entries themselve.
21 Also test that the new data size has the correct size if the transaction is aborted
21 Also test that the new data size has the correct size if the transaction is aborted
22 after the index has been replaced.
22 after the index has been replaced.
23
23
24 Test repo has one small, one moderate and one big change. The clone has
24 Test repo has one small, one moderate and one big change. The clone has
25 the small and moderate change and will transition to non-inline storage when
25 the small and moderate change and will transition to non-inline storage when
26 adding the big change.
26 adding the big change.
27
27
28 $ hg init troffset-computation --config format.revlog-compression=none
28 $ hg init troffset-computation --config format.revlog-compression=none
29 $ cd troffset-computation
29 $ cd troffset-computation
30 $ printf '%20d' '1' > file
30 $ printf '%20d' '1' > file
31 $ hg commit -Aqm_
31 $ hg commit -Aqm_
32 $ printf '%1024d' '1' > file
32 $ printf '%1024d' '1' > file
33 $ hg commit -Aqm_
33 $ hg commit -Aqm_
34 $ dd if=/dev/zero of=file bs=1k count=128 > /dev/null 2>&1
34 $ dd if=/dev/zero of=file bs=1k count=128 > /dev/null 2>&1
35 $ hg commit -Aqm_
35 $ hg commit -Aqm_
36 $ cd ..
36 $ cd ..
37
37
38 $ hg clone -r 1 troffset-computation troffset-computation-copy --config format.revlog-compression=none -q
38 $ hg clone -r 1 troffset-computation troffset-computation-copy --config format.revlog-compression=none -q
39 $ cd troffset-computation-copy
39 $ cd troffset-computation-copy
40
40
41 Reference size:
41 Reference size:
42
42
43 $ f -s .hg/store/data/file*
43 $ f -s .hg/store/data/file*
44 .hg/store/data/file.i: size=1174
44 .hg/store/data/file.i: size=1174
45
45
46 $ cat > .hg/hgrc <<EOF
46 $ cat > .hg/hgrc <<EOF
47 > [hooks]
47 > [hooks]
48 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
48 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
49 > EOF
49 > EOF
50 #if chg
50 #if chg
51 $ hg pull ../troffset-computation
51 $ hg pull ../troffset-computation
52 pulling from ../troffset-computation
52 pulling from ../troffset-computation
53 [255]
53 [255]
54 #else
54 #else
55 $ hg pull ../troffset-computation
55 $ hg pull ../troffset-computation
56 pulling from ../troffset-computation
56 pulling from ../troffset-computation
57 [80]
57 [80]
58 #endif
58 #endif
59 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file | tail -1
59 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file | tail -1
60 data/file.i 128
60 data/file.i 128
61
61
62 The first file.i entry should match the size above.
62 The first file.i entry should match the size above.
63 The first file.d entry is the temporary record during the split,
63 The first file.d entry is the temporary record during the split,
64 the second entry after the split happened. The sum of the second file.d
64 the second entry after the split happened. The sum of the second file.d
65 and the second file.i entry should match the first file.i entry.
65 and the second file.i entry should match the first file.i entry.
66
66
67 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
67 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
68 data/file.i 1174
68 data/file.i 1174
69 data/file.d 0
69 data/file.d 0
70 data/file.d 1046
70 data/file.d 1046
71 data/file.i 128
71 data/file.i 128
72 $ hg recover
72 $ hg recover
73 rolling back interrupted transaction
73 rolling back interrupted transaction
74 (verify step skipped, run `hg verify` to check your repository content)
74 (verify step skipped, run `hg verify` to check your repository content)
75 $ f -s .hg/store/data/file*
75 $ f -s .hg/store/data/file*
76 .hg/store/data/file.d: size=1046
76 .hg/store/data/file.d: size=1046
77 .hg/store/data/file.i: size=128
77 .hg/store/data/file.i: size=128
78 $ hg tip
78 $ hg tip
79 changeset: 1:3ce491143aec
79 changeset: 1:3ce491143aec
80 tag: tip
80 tag: tip
81 user: test
81 user: test
82 date: Thu Jan 01 00:00:00 1970 +0000
82 date: Thu Jan 01 00:00:00 1970 +0000
83 summary: _
83 summary: _
84
84
85 $ hg verify -q
85 $ hg verify -q
86 warning: revlog 'data/file.d' not in fncache!
86 warning: revlog 'data/file.d' not in fncache!
87 1 warnings encountered!
87 1 warnings encountered!
88 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
88 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
89 $ hg debugrebuildfncache --only-data
90 adding data/file.d
91 1 items added, 0 removed from fncache
92 $ hg verify -q
89 $ cd ..
93 $ cd ..
90
94
91
95
92 Now retry the procedure but intercept the rename of the index and check that
96 Now retry the procedure but intercept the rename of the index and check that
93 the journal does not contain the new index size. This demonstrates the edge case
97 the journal does not contain the new index size. This demonstrates the edge case
94 where the data file is left as garbage.
98 where the data file is left as garbage.
95
99
96 $ hg clone -r 1 troffset-computation troffset-computation-copy2 --config format.revlog-compression=none -q
100 $ hg clone -r 1 troffset-computation troffset-computation-copy2 --config format.revlog-compression=none -q
97 $ cd troffset-computation-copy2
101 $ cd troffset-computation-copy2
98 $ cat > .hg/hgrc <<EOF
102 $ cat > .hg/hgrc <<EOF
99 > [extensions]
103 > [extensions]
100 > intercept_rename = $TESTTMP/intercept_rename.py
104 > intercept_rename = $TESTTMP/intercept_rename.py
101 > [hooks]
105 > [hooks]
102 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
106 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
103 > EOF
107 > EOF
104 #if chg
108 #if chg
105 $ hg pull ../troffset-computation
109 $ hg pull ../troffset-computation
106 pulling from ../troffset-computation
110 pulling from ../troffset-computation
107 [255]
111 [255]
108 #else
112 #else
109 $ hg pull ../troffset-computation
113 $ hg pull ../troffset-computation
110 pulling from ../troffset-computation
114 pulling from ../troffset-computation
111 [80]
115 [80]
112 #endif
116 #endif
113 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
117 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
114 data/file.i 1174
118 data/file.i 1174
115 data/file.d 0
119 data/file.d 0
116 data/file.d 1046
120 data/file.d 1046
117
121
118 $ hg recover
122 $ hg recover
119 rolling back interrupted transaction
123 rolling back interrupted transaction
120 (verify step skipped, run `hg verify` to check your repository content)
124 (verify step skipped, run `hg verify` to check your repository content)
121 $ f -s .hg/store/data/file*
125 $ f -s .hg/store/data/file*
122 .hg/store/data/file.d: size=1046
126 .hg/store/data/file.d: size=1046
123 .hg/store/data/file.i: size=1174
127 .hg/store/data/file.i: size=1174
124 $ hg tip
128 $ hg tip
125 changeset: 1:3ce491143aec
129 changeset: 1:3ce491143aec
126 tag: tip
130 tag: tip
127 user: test
131 user: test
128 date: Thu Jan 01 00:00:00 1970 +0000
132 date: Thu Jan 01 00:00:00 1970 +0000
129 summary: _
133 summary: _
130
134
131 $ hg verify -q
135 $ hg verify -q
132 $ cd ..
136 $ cd ..
133
137
134
138
135 Repeat the original test but let hg rollback the transaction.
139 Repeat the original test but let hg rollback the transaction.
136
140
137 $ hg clone -r 1 troffset-computation troffset-computation-copy-rb --config format.revlog-compression=none -q
141 $ hg clone -r 1 troffset-computation troffset-computation-copy-rb --config format.revlog-compression=none -q
138 $ cd troffset-computation-copy-rb
142 $ cd troffset-computation-copy-rb
139 $ cat > .hg/hgrc <<EOF
143 $ cat > .hg/hgrc <<EOF
140 > [hooks]
144 > [hooks]
141 > pretxnchangegroup = false
145 > pretxnchangegroup = false
142 > EOF
146 > EOF
143 $ hg pull ../troffset-computation
147 $ hg pull ../troffset-computation
144 pulling from ../troffset-computation
148 pulling from ../troffset-computation
145 searching for changes
149 searching for changes
146 adding changesets
150 adding changesets
147 adding manifests
151 adding manifests
148 adding file changes
152 adding file changes
149 transaction abort!
153 transaction abort!
150 rollback completed
154 rollback completed
151 abort: pretxnchangegroup hook exited with status 1
155 abort: pretxnchangegroup hook exited with status 1
152 [40]
156 [40]
153 $ f -s .hg/store/data/file*
157 $ f -s .hg/store/data/file*
154 .hg/store/data/file.d: size=1046
158 .hg/store/data/file.d: size=1046
155 .hg/store/data/file.i: size=128
159 .hg/store/data/file.i: size=128
156 $ hg tip
160 $ hg tip
157 changeset: 1:3ce491143aec
161 changeset: 1:3ce491143aec
158 tag: tip
162 tag: tip
159 user: test
163 user: test
160 date: Thu Jan 01 00:00:00 1970 +0000
164 date: Thu Jan 01 00:00:00 1970 +0000
161 summary: _
165 summary: _
162
166
163 $ hg verify -q
167 $ hg verify -q
164 warning: revlog 'data/file.d' not in fncache!
168 warning: revlog 'data/file.d' not in fncache!
165 1 warnings encountered!
169 1 warnings encountered!
166 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
170 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
167 $ cd ..
171 $ cd ..
168
172
General Comments 0
You need to be logged in to leave comments. Login now