debugtagscache: verify that filenode is correct...
Pulkit Goyal, r47401:e4e971ab (default)
@@ -1,4755 +1,4758 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import glob
import operator
import os
import platform
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    bundle2,
    bundlerepo,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    mergestate as mergestatemod,
    metadata,
    obsolete,
    obsutil,
    pathutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    repoview,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    strip,
    tags as tagsmod,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil,
    nodemap,
    sidedata,
)

release = lockmod.release

table = {}
table.update(strip.command._table)
command = registrar.command(table)


@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))


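# Illustrative usage sketch for `debugancestor` above (the revision numbers
# are placeholders, not taken from this changeset):
#
#   $ hg debugancestor 3 5
#
# would look up both revisions in the changelog and print their greatest
# common ancestor as "<rev>:<hexnode>", matching the ui.write() format above.

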
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))


@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)


@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))


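# Illustrative sketch of the DAG text accepted by `debugbuilddag` above (the
# repository name and the exact DAG string are assumptions for the example):
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+3 *2 +2 /3 :mergetag'
#
# Per the docstring: "+3" is a linear run of three nodes, "*2" forks from the
# node two back, "+2" extends the fork, "/3" merges the preceding node with
# the node three back, and ":mergetag" puts a local tag on that merge.

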
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))


def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()


def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))


def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return b'{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        )
    return pycompat.bytestr(repr(thing))


def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)


@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)


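# Illustrative usage sketch for `debugbundle` above (the bundle file name is
# an assumption):
#
#   $ hg bundle --all changes.hg
#   $ hg debugbundle --spec changes.hg
#   $ hg debugbundle --all changes.hg
#
# The first debugbundle call prints only the bundlespec; the second walks the
# parts via _debugbundle2/_debugchangegroup and lists node, parents, changeset
# and delta-base details for each chunk.

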
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.writenoi18n(b'Main capabilities:\n')
    for c in sorted(caps):
        ui.write(b' %s\n' % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for v in values:
                ui.write(b' %s\n' % v)


@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)


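# Illustrative usage sketch for `debugchangedfiles` above (REV stands for any
# revision identifier):
#
#   $ hg debugchangedfiles REV
#   $ hg debugchangedfiles --compute REV
#
# The first form reads the files block from the changelog sidedata
# (sidedata.SD_FILES); the second recomputes it with
# metadata.compute_all_files_changes(). Each touched file is printed as
# "<action> <copy-parent>: <file>, <copy-source>;" per the template above.

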
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)


@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)


def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)


def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # 50
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')


@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))


@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")


@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)


@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))


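# Illustrative usage sketch for `debugdate` above (the date string is an
# assumption):
#
#   $ hg debugdate -e '2021-04-01 12:00'
#
# parses the date (with the extended format list because of -e) and prints
# the internal "(unixtime, offset)" pair and the standard rendering from
# dateutil.datestr(); passing a RANGE argument additionally reports whether
# the parsed date falls in that range via dateutil.matchdate().

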
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()


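# Illustrative usage sketch for `debugdeltachain` above (the template string
# is an assumption; -m and -T come from the debugrevlogopts/formatteropts the
# command declares):
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# walks the manifest revlog and emits one line per revision using the
# template keywords documented in the docstring; without -T, the columnar
# output built with fm.plain()/fm.write() above is printed instead.

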
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))


@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            "",
            'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            "",
            'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:

        remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
    else:
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
1080 nodes = [repo[r].node() for r in revs]
1080 nodes = [repo[r].node() for r in revs]
1081 common, any, hds = setdiscovery.findcommonheads(
1081 common, any, hds = setdiscovery.findcommonheads(
1082 ui, repo, remote, ancestorsof=nodes, audit=data
1082 ui, repo, remote, ancestorsof=nodes, audit=data
1083 )
1083 )
1084 return common, hds
1084 return common, hds
1085
1085
1086 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1086 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1087 localrevs = opts[b'rev']
1087 localrevs = opts[b'rev']
1088 with util.timedcm('debug-discovery') as t:
1088 with util.timedcm('debug-discovery') as t:
1089 common, hds = doit(localrevs, remoterevs)
1089 common, hds = doit(localrevs, remoterevs)
1090
1090
1091 # compute all statistics
1091 # compute all statistics
1092 heads_common = set(common)
1092 heads_common = set(common)
1093 heads_remote = set(hds)
1093 heads_remote = set(hds)
1094 heads_local = set(repo.heads())
1094 heads_local = set(repo.heads())
1095 # note: there cannot be a local or remote head that is in common and not
1095 # note: there cannot be a local or remote head that is in common and not
1096 # itself a head of common.
1096 # itself a head of common.
1097 heads_common_local = heads_common & heads_local
1097 heads_common_local = heads_common & heads_local
1098 heads_common_remote = heads_common & heads_remote
1098 heads_common_remote = heads_common & heads_remote
1099 heads_common_both = heads_common & heads_remote & heads_local
1099 heads_common_both = heads_common & heads_remote & heads_local
1100
1100
1101 all = repo.revs(b'all()')
1101 all = repo.revs(b'all()')
1102 common = repo.revs(b'::%ln', common)
1102 common = repo.revs(b'::%ln', common)
1103 roots_common = repo.revs(b'roots(::%ld)', common)
1103 roots_common = repo.revs(b'roots(::%ld)', common)
1104 missing = repo.revs(b'not ::%ld', common)
1104 missing = repo.revs(b'not ::%ld', common)
1105 heads_missing = repo.revs(b'heads(%ld)', missing)
1105 heads_missing = repo.revs(b'heads(%ld)', missing)
1106 roots_missing = repo.revs(b'roots(%ld)', missing)
1106 roots_missing = repo.revs(b'roots(%ld)', missing)
1107 assert len(common) + len(missing) == len(all)
1107 assert len(common) + len(missing) == len(all)
1108
1108
1109 initial_undecided = repo.revs(
1109 initial_undecided = repo.revs(
1110 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1110 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1111 )
1111 )
1112 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1112 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1113 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1113 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1114 common_initial_undecided = initial_undecided & common
1114 common_initial_undecided = initial_undecided & common
1115 missing_initial_undecided = initial_undecided & missing
1115 missing_initial_undecided = initial_undecided & missing
1116
1116
1117 data[b'elapsed'] = t.elapsed
1117 data[b'elapsed'] = t.elapsed
1118 data[b'nb-common-heads'] = len(heads_common)
1118 data[b'nb-common-heads'] = len(heads_common)
1119 data[b'nb-common-heads-local'] = len(heads_common_local)
1119 data[b'nb-common-heads-local'] = len(heads_common_local)
1120 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1120 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1121 data[b'nb-common-heads-both'] = len(heads_common_both)
1121 data[b'nb-common-heads-both'] = len(heads_common_both)
1122 data[b'nb-common-roots'] = len(roots_common)
1122 data[b'nb-common-roots'] = len(roots_common)
1123 data[b'nb-head-local'] = len(heads_local)
1123 data[b'nb-head-local'] = len(heads_local)
1124 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1124 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1125 data[b'nb-head-remote'] = len(heads_remote)
1125 data[b'nb-head-remote'] = len(heads_remote)
1126 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1126 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1127 heads_common_remote
1127 heads_common_remote
1128 )
1128 )
1129 data[b'nb-revs'] = len(all)
1129 data[b'nb-revs'] = len(all)
1130 data[b'nb-revs-common'] = len(common)
1130 data[b'nb-revs-common'] = len(common)
1131 data[b'nb-revs-missing'] = len(missing)
1131 data[b'nb-revs-missing'] = len(missing)
1132 data[b'nb-missing-heads'] = len(heads_missing)
1132 data[b'nb-missing-heads'] = len(heads_missing)
1133 data[b'nb-missing-roots'] = len(roots_missing)
1133 data[b'nb-missing-roots'] = len(roots_missing)
1134 data[b'nb-ini_und'] = len(initial_undecided)
1134 data[b'nb-ini_und'] = len(initial_undecided)
1135 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1135 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1136 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1136 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1137 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1137 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1138 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1138 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1139
1139
1140 # display discovery summary
1140 # display discovery summary
1141 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1141 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
1142 ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
1142 ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
1143 ui.writenoi18n(b"heads summary:\n")
1143 ui.writenoi18n(b"heads summary:\n")
1144 ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
1144 ui.writenoi18n(b" total common heads: %(nb-common-heads)9d\n" % data)
1145 ui.writenoi18n(
1145 ui.writenoi18n(
1146 b" also local heads: %(nb-common-heads-local)9d\n" % data
1146 b" also local heads: %(nb-common-heads-local)9d\n" % data
1147 )
1147 )
1148 ui.writenoi18n(
1148 ui.writenoi18n(
1149 b" also remote heads: %(nb-common-heads-remote)9d\n" % data
1149 b" also remote heads: %(nb-common-heads-remote)9d\n" % data
1150 )
1150 )
1151 ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
1151 ui.writenoi18n(b" both: %(nb-common-heads-both)9d\n" % data)
1152 ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
1152 ui.writenoi18n(b" local heads: %(nb-head-local)9d\n" % data)
1153 ui.writenoi18n(
1153 ui.writenoi18n(
1154 b" common: %(nb-common-heads-local)9d\n" % data
1154 b" common: %(nb-common-heads-local)9d\n" % data
1155 )
1155 )
1156 ui.writenoi18n(
1156 ui.writenoi18n(
1157 b" missing: %(nb-head-local-missing)9d\n" % data
1157 b" missing: %(nb-head-local-missing)9d\n" % data
1158 )
1158 )
1159 ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
1159 ui.writenoi18n(b" remote heads: %(nb-head-remote)9d\n" % data)
1160 ui.writenoi18n(
1160 ui.writenoi18n(
1161 b" common: %(nb-common-heads-remote)9d\n" % data
1161 b" common: %(nb-common-heads-remote)9d\n" % data
1162 )
1162 )
1163 ui.writenoi18n(
1163 ui.writenoi18n(
1164 b" unknown: %(nb-head-remote-unknown)9d\n" % data
1164 b" unknown: %(nb-head-remote-unknown)9d\n" % data
1165 )
1165 )
1166 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1166 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
1167 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1167 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
1168 ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
1168 ui.writenoi18n(b" heads: %(nb-common-heads)9d\n" % data)
1169 ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
1169 ui.writenoi18n(b" roots: %(nb-common-roots)9d\n" % data)
1170 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1170 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
1171 ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
1171 ui.writenoi18n(b" heads: %(nb-missing-heads)9d\n" % data)
1172 ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
1172 ui.writenoi18n(b" roots: %(nb-missing-roots)9d\n" % data)
1173 ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
1173 ui.writenoi18n(b" first undecided set: %(nb-ini_und)9d\n" % data)
1174 ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
1174 ui.writenoi18n(b" heads: %(nb-ini_und-heads)9d\n" % data)
1175 ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
1175 ui.writenoi18n(b" roots: %(nb-ini_und-roots)9d\n" % data)
1176 ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
1176 ui.writenoi18n(b" common: %(nb-ini_und-common)9d\n" % data)
1177 ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)
1177 ui.writenoi18n(b" missing: %(nb-ini_und-missing)9d\n" % data)
1178
1178
1179 if ui.verbose:
1179 if ui.verbose:
1180 ui.writenoi18n(
1180 ui.writenoi18n(
1181 b"common heads: %s\n"
1181 b"common heads: %s\n"
1182 % b" ".join(sorted(short(n) for n in heads_common))
1182 % b" ".join(sorted(short(n) for n in heads_common))
1183 )
1183 )
1184
1184
1185
1185
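# Illustrative sketch, not part of the original source: the
# --local-as-revs / --remote-as-revs handling in debugdiscovery above hides
# everything outside the requested subset behind a temporary repoview filter
# so the peer only "sees" those revisions.  The helper below reduces that to
# its essence; the function name and filter id are hypothetical, and it
# assumes a bytes revspec plus the `scmutil` and `repoview` modules already
# imported at the top of this file.
def _example_subset_peer(repo, revspec, name=b'debug-discovery-example'):
    """Return a peer for ``repo`` that pretends only ``revspec`` exists."""
    unfi = repo.unfiltered()
    # everything that is not an ancestor of the requested set becomes hidden
    hidden = frozenset(scmutil.revrange(unfi, [b"not (::(%s))" % revspec]))
    repoview.filtertable[name] = lambda unused_repo: hidden
    peer = repo.peer()
    peer._repo = peer._repo.filtered(name)
    return peer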
1186 _chunksize = 4 << 10
1186 _chunksize = 4 << 10
1187
1187
1188
1188
1189 @command(
1189 @command(
1190 b'debugdownload',
1190 b'debugdownload',
1191 [
1191 [
1192 (b'o', b'output', b'', _(b'path')),
1192 (b'o', b'output', b'', _(b'path')),
1193 ],
1193 ],
1194 optionalrepo=True,
1194 optionalrepo=True,
1195 )
1195 )
1196 def debugdownload(ui, repo, url, output=None, **opts):
1196 def debugdownload(ui, repo, url, output=None, **opts):
1197 """download a resource using Mercurial logic and config"""
1197 """download a resource using Mercurial logic and config"""
1198 fh = urlmod.open(ui, url, output)
1198 fh = urlmod.open(ui, url, output)
1199
1199
1200 dest = ui
1200 dest = ui
1201 if output:
1201 if output:
1202 dest = open(output, b"wb", _chunksize)
1202 dest = open(output, b"wb", _chunksize)
1203 try:
1203 try:
1204 data = fh.read(_chunksize)
1204 data = fh.read(_chunksize)
1205 while data:
1205 while data:
1206 dest.write(data)
1206 dest.write(data)
1207 data = fh.read(_chunksize)
1207 data = fh.read(_chunksize)
1208 finally:
1208 finally:
1209 if output:
1209 if output:
1210 dest.close()
1210 dest.close()
1211
1211
1212
1212
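# Illustrative sketch, not part of the original source: the read/write loop
# in debugdownload above is the generic "copy fixed-size chunks until the
# source is exhausted" pattern.  A standalone analogue over plain file
# objects; the helper name is hypothetical.
def _example_copy_chunked(src, dst, chunksize=_chunksize):
    """Copy everything readable from ``src`` into ``dst``, chunk by chunk."""
    data = src.read(chunksize)
    while data:
        dst.write(data)
        data = src.read(chunksize)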
1213 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1213 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1214 def debugextensions(ui, repo, **opts):
1214 def debugextensions(ui, repo, **opts):
1215 '''show information about active extensions'''
1215 '''show information about active extensions'''
1216 opts = pycompat.byteskwargs(opts)
1216 opts = pycompat.byteskwargs(opts)
1217 exts = extensions.extensions(ui)
1217 exts = extensions.extensions(ui)
1218 hgver = util.version()
1218 hgver = util.version()
1219 fm = ui.formatter(b'debugextensions', opts)
1219 fm = ui.formatter(b'debugextensions', opts)
1220 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1220 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1221 isinternal = extensions.ismoduleinternal(extmod)
1221 isinternal = extensions.ismoduleinternal(extmod)
1222 extsource = None
1222 extsource = None
1223
1223
1224 if util.safehasattr(extmod, '__file__'):
1224 if util.safehasattr(extmod, '__file__'):
1225 extsource = pycompat.fsencode(extmod.__file__)
1225 extsource = pycompat.fsencode(extmod.__file__)
1226 elif getattr(sys, 'oxidized', False):
1226 elif getattr(sys, 'oxidized', False):
1227 extsource = pycompat.sysexecutable
1227 extsource = pycompat.sysexecutable
1228 if isinternal:
1228 if isinternal:
1229 exttestedwith = [] # never expose magic string to users
1229 exttestedwith = [] # never expose magic string to users
1230 else:
1230 else:
1231 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1231 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1232 extbuglink = getattr(extmod, 'buglink', None)
1232 extbuglink = getattr(extmod, 'buglink', None)
1233
1233
1234 fm.startitem()
1234 fm.startitem()
1235
1235
1236 if ui.quiet or ui.verbose:
1236 if ui.quiet or ui.verbose:
1237 fm.write(b'name', b'%s\n', extname)
1237 fm.write(b'name', b'%s\n', extname)
1238 else:
1238 else:
1239 fm.write(b'name', b'%s', extname)
1239 fm.write(b'name', b'%s', extname)
1240 if isinternal or hgver in exttestedwith:
1240 if isinternal or hgver in exttestedwith:
1241 fm.plain(b'\n')
1241 fm.plain(b'\n')
1242 elif not exttestedwith:
1242 elif not exttestedwith:
1243 fm.plain(_(b' (untested!)\n'))
1243 fm.plain(_(b' (untested!)\n'))
1244 else:
1244 else:
1245 lasttestedversion = exttestedwith[-1]
1245 lasttestedversion = exttestedwith[-1]
1246 fm.plain(b' (%s!)\n' % lasttestedversion)
1246 fm.plain(b' (%s!)\n' % lasttestedversion)
1247
1247
1248 fm.condwrite(
1248 fm.condwrite(
1249 ui.verbose and extsource,
1249 ui.verbose and extsource,
1250 b'source',
1250 b'source',
1251 _(b' location: %s\n'),
1251 _(b' location: %s\n'),
1252 extsource or b"",
1252 extsource or b"",
1253 )
1253 )
1254
1254
1255 if ui.verbose:
1255 if ui.verbose:
1256 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1256 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1257 fm.data(bundled=isinternal)
1257 fm.data(bundled=isinternal)
1258
1258
1259 fm.condwrite(
1259 fm.condwrite(
1260 ui.verbose and exttestedwith,
1260 ui.verbose and exttestedwith,
1261 b'testedwith',
1261 b'testedwith',
1262 _(b' tested with: %s\n'),
1262 _(b' tested with: %s\n'),
1263 fm.formatlist(exttestedwith, name=b'ver'),
1263 fm.formatlist(exttestedwith, name=b'ver'),
1264 )
1264 )
1265
1265
1266 fm.condwrite(
1266 fm.condwrite(
1267 ui.verbose and extbuglink,
1267 ui.verbose and extbuglink,
1268 b'buglink',
1268 b'buglink',
1269 _(b' bug reporting: %s\n'),
1269 _(b' bug reporting: %s\n'),
1270 extbuglink or b"",
1270 extbuglink or b"",
1271 )
1271 )
1272
1272
1273 fm.end()
1273 fm.end()
1274
1274
1275
1275
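# Illustrative sketch, not part of the original source: the annotation logic
# in debugextensions above prints nothing for internal or known-good
# extensions, "(untested!)" when no tested version is declared, and the last
# declared version otherwise.  The same decision reduced to a helper with
# hypothetical inputs (``hgver`` a version byte string, ``testedwith`` a list).
def _example_tested_note(hgver, testedwith, isinternal=False):
    if isinternal or hgver in testedwith:
        return b''
    if not testedwith:
        return b' (untested!)'
    return b' (%s!)' % testedwith[-1]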
1276 @command(
1276 @command(
1277 b'debugfileset',
1277 b'debugfileset',
1278 [
1278 [
1279 (
1279 (
1280 b'r',
1280 b'r',
1281 b'rev',
1281 b'rev',
1282 b'',
1282 b'',
1283 _(b'apply the filespec on this revision'),
1283 _(b'apply the filespec on this revision'),
1284 _(b'REV'),
1284 _(b'REV'),
1285 ),
1285 ),
1286 (
1286 (
1287 b'',
1287 b'',
1288 b'all-files',
1288 b'all-files',
1289 False,
1289 False,
1290 _(b'test files from all revisions and working directory'),
1290 _(b'test files from all revisions and working directory'),
1291 ),
1291 ),
1292 (
1292 (
1293 b's',
1293 b's',
1294 b'show-matcher',
1294 b'show-matcher',
1295 None,
1295 None,
1296 _(b'print internal representation of matcher'),
1296 _(b'print internal representation of matcher'),
1297 ),
1297 ),
1298 (
1298 (
1299 b'p',
1299 b'p',
1300 b'show-stage',
1300 b'show-stage',
1301 [],
1301 [],
1302 _(b'print parsed tree at the given stage'),
1302 _(b'print parsed tree at the given stage'),
1303 _(b'NAME'),
1303 _(b'NAME'),
1304 ),
1304 ),
1305 ],
1305 ],
1306 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1306 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1307 )
1307 )
1308 def debugfileset(ui, repo, expr, **opts):
1308 def debugfileset(ui, repo, expr, **opts):
1309 '''parse and apply a fileset specification'''
1309 '''parse and apply a fileset specification'''
1310 from . import fileset
1310 from . import fileset
1311
1311
1312 fileset.symbols # force import of fileset so we have predicates to optimize
1312 fileset.symbols # force import of fileset so we have predicates to optimize
1313 opts = pycompat.byteskwargs(opts)
1313 opts = pycompat.byteskwargs(opts)
1314 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1314 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1315
1315
1316 stages = [
1316 stages = [
1317 (b'parsed', pycompat.identity),
1317 (b'parsed', pycompat.identity),
1318 (b'analyzed', filesetlang.analyze),
1318 (b'analyzed', filesetlang.analyze),
1319 (b'optimized', filesetlang.optimize),
1319 (b'optimized', filesetlang.optimize),
1320 ]
1320 ]
1321 stagenames = {n for n, f in stages}
1321 stagenames = {n for n, f in stages}
1322
1322
1323 showalways = set()
1323 showalways = set()
1324 if ui.verbose and not opts[b'show_stage']:
1324 if ui.verbose and not opts[b'show_stage']:
1325 # show parsed tree by --verbose (deprecated)
1325 # show parsed tree by --verbose (deprecated)
1326 showalways.add(b'parsed')
1326 showalways.add(b'parsed')
1327 if opts[b'show_stage'] == [b'all']:
1327 if opts[b'show_stage'] == [b'all']:
1328 showalways.update(stagenames)
1328 showalways.update(stagenames)
1329 else:
1329 else:
1330 for n in opts[b'show_stage']:
1330 for n in opts[b'show_stage']:
1331 if n not in stagenames:
1331 if n not in stagenames:
1332 raise error.Abort(_(b'invalid stage name: %s') % n)
1332 raise error.Abort(_(b'invalid stage name: %s') % n)
1333 showalways.update(opts[b'show_stage'])
1333 showalways.update(opts[b'show_stage'])
1334
1334
1335 tree = filesetlang.parse(expr)
1335 tree = filesetlang.parse(expr)
1336 for n, f in stages:
1336 for n, f in stages:
1337 tree = f(tree)
1337 tree = f(tree)
1338 if n in showalways:
1338 if n in showalways:
1339 if opts[b'show_stage'] or n != b'parsed':
1339 if opts[b'show_stage'] or n != b'parsed':
1340 ui.write(b"* %s:\n" % n)
1340 ui.write(b"* %s:\n" % n)
1341 ui.write(filesetlang.prettyformat(tree), b"\n")
1341 ui.write(filesetlang.prettyformat(tree), b"\n")
1342
1342
1343 files = set()
1343 files = set()
1344 if opts[b'all_files']:
1344 if opts[b'all_files']:
1345 for r in repo:
1345 for r in repo:
1346 c = repo[r]
1346 c = repo[r]
1347 files.update(c.files())
1347 files.update(c.files())
1348 files.update(c.substate)
1348 files.update(c.substate)
1349 if opts[b'all_files'] or ctx.rev() is None:
1349 if opts[b'all_files'] or ctx.rev() is None:
1350 wctx = repo[None]
1350 wctx = repo[None]
1351 files.update(
1351 files.update(
1352 repo.dirstate.walk(
1352 repo.dirstate.walk(
1353 scmutil.matchall(repo),
1353 scmutil.matchall(repo),
1354 subrepos=list(wctx.substate),
1354 subrepos=list(wctx.substate),
1355 unknown=True,
1355 unknown=True,
1356 ignored=True,
1356 ignored=True,
1357 )
1357 )
1358 )
1358 )
1359 files.update(wctx.substate)
1359 files.update(wctx.substate)
1360 else:
1360 else:
1361 files.update(ctx.files())
1361 files.update(ctx.files())
1362 files.update(ctx.substate)
1362 files.update(ctx.substate)
1363
1363
1364 m = ctx.matchfileset(repo.getcwd(), expr)
1364 m = ctx.matchfileset(repo.getcwd(), expr)
1365 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1365 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1366 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1366 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1367 for f in sorted(files):
1367 for f in sorted(files):
1368 if not m(f):
1368 if not m(f):
1369 continue
1369 continue
1370 ui.write(b"%s\n" % f)
1370 ui.write(b"%s\n" % f)
1371
1371
1372
1372
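# Illustrative sketch, not part of the original source: debugfileset above
# pushes the parsed expression through an ordered list of (name, transform)
# stages and prints the intermediate tree for every stage requested with
# --show-stage.  The same staged-pipeline shape in isolation; the function
# name and the ``report`` callback are hypothetical.
def _example_run_stages(tree, stages, show=(), report=None):
    """Apply ``stages`` to ``tree`` in order, reporting requested stages."""
    for name, transform in stages:
        tree = transform(tree)
        if report is not None and name in show:
            report(name, tree)  # e.g. pretty-print the intermediate tree
    return tree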
1373 @command(b'debugformat', [] + cmdutil.formatteropts)
1373 @command(b'debugformat', [] + cmdutil.formatteropts)
1374 def debugformat(ui, repo, **opts):
1374 def debugformat(ui, repo, **opts):
1375 """display format information about the current repository
1375 """display format information about the current repository
1376
1376
1377 Use --verbose to get extra information about the current config value and
1377 Use --verbose to get extra information about the current config value and
1378 the Mercurial default."""
1378 the Mercurial default."""
1379 opts = pycompat.byteskwargs(opts)
1379 opts = pycompat.byteskwargs(opts)
1380 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1380 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1381 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1381 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1382
1382
1383 def makeformatname(name):
1383 def makeformatname(name):
1384 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1384 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1385
1385
1386 fm = ui.formatter(b'debugformat', opts)
1386 fm = ui.formatter(b'debugformat', opts)
1387 if fm.isplain():
1387 if fm.isplain():
1388
1388
1389 def formatvalue(value):
1389 def formatvalue(value):
1390 if util.safehasattr(value, b'startswith'):
1390 if util.safehasattr(value, b'startswith'):
1391 return value
1391 return value
1392 if value:
1392 if value:
1393 return b'yes'
1393 return b'yes'
1394 else:
1394 else:
1395 return b'no'
1395 return b'no'
1396
1396
1397 else:
1397 else:
1398 formatvalue = pycompat.identity
1398 formatvalue = pycompat.identity
1399
1399
1400 fm.plain(b'format-variant')
1400 fm.plain(b'format-variant')
1401 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1401 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1402 fm.plain(b' repo')
1402 fm.plain(b' repo')
1403 if ui.verbose:
1403 if ui.verbose:
1404 fm.plain(b' config default')
1404 fm.plain(b' config default')
1405 fm.plain(b'\n')
1405 fm.plain(b'\n')
1406 for fv in upgrade.allformatvariant:
1406 for fv in upgrade.allformatvariant:
1407 fm.startitem()
1407 fm.startitem()
1408 repovalue = fv.fromrepo(repo)
1408 repovalue = fv.fromrepo(repo)
1409 configvalue = fv.fromconfig(repo)
1409 configvalue = fv.fromconfig(repo)
1410
1410
1411 if repovalue != configvalue:
1411 if repovalue != configvalue:
1412 namelabel = b'formatvariant.name.mismatchconfig'
1412 namelabel = b'formatvariant.name.mismatchconfig'
1413 repolabel = b'formatvariant.repo.mismatchconfig'
1413 repolabel = b'formatvariant.repo.mismatchconfig'
1414 elif repovalue != fv.default:
1414 elif repovalue != fv.default:
1415 namelabel = b'formatvariant.name.mismatchdefault'
1415 namelabel = b'formatvariant.name.mismatchdefault'
1416 repolabel = b'formatvariant.repo.mismatchdefault'
1416 repolabel = b'formatvariant.repo.mismatchdefault'
1417 else:
1417 else:
1418 namelabel = b'formatvariant.name.uptodate'
1418 namelabel = b'formatvariant.name.uptodate'
1419 repolabel = b'formatvariant.repo.uptodate'
1419 repolabel = b'formatvariant.repo.uptodate'
1420
1420
1421 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1421 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1422 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1422 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1423 if fv.default != configvalue:
1423 if fv.default != configvalue:
1424 configlabel = b'formatvariant.config.special'
1424 configlabel = b'formatvariant.config.special'
1425 else:
1425 else:
1426 configlabel = b'formatvariant.config.default'
1426 configlabel = b'formatvariant.config.default'
1427 fm.condwrite(
1427 fm.condwrite(
1428 ui.verbose,
1428 ui.verbose,
1429 b'config',
1429 b'config',
1430 b' %6s',
1430 b' %6s',
1431 formatvalue(configvalue),
1431 formatvalue(configvalue),
1432 label=configlabel,
1432 label=configlabel,
1433 )
1433 )
1434 fm.condwrite(
1434 fm.condwrite(
1435 ui.verbose,
1435 ui.verbose,
1436 b'default',
1436 b'default',
1437 b' %7s',
1437 b' %7s',
1438 formatvalue(fv.default),
1438 formatvalue(fv.default),
1439 label=b'formatvariant.default',
1439 label=b'formatvariant.default',
1440 )
1440 )
1441 fm.plain(b'\n')
1441 fm.plain(b'\n')
1442 fm.end()
1442 fm.end()
1443
1443
1444
1444
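# Illustrative sketch, not part of the original source: for each format
# variant, debugformat above picks its display labels by comparing the value
# found in the repository with the configured value and the built-in
# default, in that order of precedence.  The helper name is hypothetical.
def _example_variant_labels(repovalue, configvalue, default):
    if repovalue != configvalue:
        suffix = b'mismatchconfig'
    elif repovalue != default:
        suffix = b'mismatchdefault'
    else:
        suffix = b'uptodate'
    return (
        b'formatvariant.name.' + suffix,
        b'formatvariant.repo.' + suffix,
    )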
1445 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1445 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1446 def debugfsinfo(ui, path=b"."):
1446 def debugfsinfo(ui, path=b"."):
1447 """show information detected about current filesystem"""
1447 """show information detected about current filesystem"""
1448 ui.writenoi18n(b'path: %s\n' % path)
1448 ui.writenoi18n(b'path: %s\n' % path)
1449 ui.writenoi18n(
1449 ui.writenoi18n(
1450 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1450 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1451 )
1451 )
1452 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1452 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1453 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1453 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1454 ui.writenoi18n(
1454 ui.writenoi18n(
1455 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1455 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1456 )
1456 )
1457 ui.writenoi18n(
1457 ui.writenoi18n(
1458 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1458 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1459 )
1459 )
1460 casesensitive = b'(unknown)'
1460 casesensitive = b'(unknown)'
1461 try:
1461 try:
1462 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1462 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1463 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1463 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1464 except OSError:
1464 except OSError:
1465 pass
1465 pass
1466 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1466 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1467
1467
1468
1468
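# Illustrative sketch, not part of the original source: the case-sensitivity
# probe in debugfsinfo above creates a throwaway file and asks
# util.fscasesensitive() about it.  A rough analogue of that probe is shown
# below; the helper name is hypothetical and the check can be fooled if a
# file with the swapped-case name already exists.
def _example_fs_case_sensitive(path=b'.'):
    """Best-effort probe: True if ``path`` lives on a case-sensitive fs."""
    with pycompat.namedtempfile(prefix=b'.casecheck', dir=path) as f:
        # the prefix guarantees the name contains letters, so swapping the
        # case always yields a different spelling of the same path
        return not os.path.exists(f.name.swapcase())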
1469 @command(
1469 @command(
1470 b'debuggetbundle',
1470 b'debuggetbundle',
1471 [
1471 [
1472 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1472 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1473 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1473 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1474 (
1474 (
1475 b't',
1475 b't',
1476 b'type',
1476 b'type',
1477 b'bzip2',
1477 b'bzip2',
1478 _(b'bundle compression type to use'),
1478 _(b'bundle compression type to use'),
1479 _(b'TYPE'),
1479 _(b'TYPE'),
1480 ),
1480 ),
1481 ],
1481 ],
1482 _(b'REPO FILE [-H|-C ID]...'),
1482 _(b'REPO FILE [-H|-C ID]...'),
1483 norepo=True,
1483 norepo=True,
1484 )
1484 )
1485 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1485 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1486 """retrieves a bundle from a repo
1486 """retrieves a bundle from a repo
1487
1487
1488 Every ID must be a full-length hex node id string. Saves the bundle to the
1488 Every ID must be a full-length hex node id string. Saves the bundle to the
1489 given file.
1489 given file.
1490 """
1490 """
1491 opts = pycompat.byteskwargs(opts)
1491 opts = pycompat.byteskwargs(opts)
1492 repo = hg.peer(ui, opts, repopath)
1492 repo = hg.peer(ui, opts, repopath)
1493 if not repo.capable(b'getbundle'):
1493 if not repo.capable(b'getbundle'):
1494 raise error.Abort(b"getbundle() not supported by target repository")
1494 raise error.Abort(b"getbundle() not supported by target repository")
1495 args = {}
1495 args = {}
1496 if common:
1496 if common:
1497 args['common'] = [bin(s) for s in common]
1497 args['common'] = [bin(s) for s in common]
1498 if head:
1498 if head:
1499 args['heads'] = [bin(s) for s in head]
1499 args['heads'] = [bin(s) for s in head]
1500 # TODO: get desired bundlecaps from command line.
1500 # TODO: get desired bundlecaps from command line.
1501 args['bundlecaps'] = None
1501 args['bundlecaps'] = None
1502 bundle = repo.getbundle(b'debug', **args)
1502 bundle = repo.getbundle(b'debug', **args)
1503
1503
1504 bundletype = opts.get(b'type', b'bzip2').lower()
1504 bundletype = opts.get(b'type', b'bzip2').lower()
1505 btypes = {
1505 btypes = {
1506 b'none': b'HG10UN',
1506 b'none': b'HG10UN',
1507 b'bzip2': b'HG10BZ',
1507 b'bzip2': b'HG10BZ',
1508 b'gzip': b'HG10GZ',
1508 b'gzip': b'HG10GZ',
1509 b'bundle2': b'HG20',
1509 b'bundle2': b'HG20',
1510 }
1510 }
1511 bundletype = btypes.get(bundletype)
1511 bundletype = btypes.get(bundletype)
1512 if bundletype not in bundle2.bundletypes:
1512 if bundletype not in bundle2.bundletypes:
1513 raise error.Abort(_(b'unknown bundle type specified with --type'))
1513 raise error.Abort(_(b'unknown bundle type specified with --type'))
1514 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1514 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1515
1515
1516
1516
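# Illustrative sketch, not part of the original source: the --type handling
# in debuggetbundle above lower-cases the requested name, maps it through a
# small table of known bundle types, and aborts on anything unrecognized.
# The same normalize-then-validate step as a hypothetical helper.
def _example_resolve_bundletype(name, known):
    """Map a user-supplied bundle type ``name`` to its internal identifier."""
    internal = known.get(name.lower())
    if internal is None:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    return internal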
1517 @command(b'debugignore', [], b'[FILE]')
1517 @command(b'debugignore', [], b'[FILE]')
1518 def debugignore(ui, repo, *files, **opts):
1518 def debugignore(ui, repo, *files, **opts):
1519 """display the combined ignore pattern and information about ignored files
1519 """display the combined ignore pattern and information about ignored files
1520
1520
1521 With no argument display the combined ignore pattern.
1521 With no argument display the combined ignore pattern.
1522
1522
1523 Given space separated file names, show whether each given file is ignored
1523 Given space separated file names, show whether each given file is ignored
1524 and, if so, the ignore rule (file and line number) that matched it.
1524 and, if so, the ignore rule (file and line number) that matched it.
1525 """
1525 """
1526 ignore = repo.dirstate._ignore
1526 ignore = repo.dirstate._ignore
1527 if not files:
1527 if not files:
1528 # Show all the patterns
1528 # Show all the patterns
1529 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1529 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1530 else:
1530 else:
1531 m = scmutil.match(repo[None], pats=files)
1531 m = scmutil.match(repo[None], pats=files)
1532 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1532 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1533 for f in m.files():
1533 for f in m.files():
1534 nf = util.normpath(f)
1534 nf = util.normpath(f)
1535 ignored = None
1535 ignored = None
1536 ignoredata = None
1536 ignoredata = None
1537 if nf != b'.':
1537 if nf != b'.':
1538 if ignore(nf):
1538 if ignore(nf):
1539 ignored = nf
1539 ignored = nf
1540 ignoredata = repo.dirstate._ignorefileandline(nf)
1540 ignoredata = repo.dirstate._ignorefileandline(nf)
1541 else:
1541 else:
1542 for p in pathutil.finddirs(nf):
1542 for p in pathutil.finddirs(nf):
1543 if ignore(p):
1543 if ignore(p):
1544 ignored = p
1544 ignored = p
1545 ignoredata = repo.dirstate._ignorefileandline(p)
1545 ignoredata = repo.dirstate._ignorefileandline(p)
1546 break
1546 break
1547 if ignored:
1547 if ignored:
1548 if ignored == nf:
1548 if ignored == nf:
1549 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1549 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1550 else:
1550 else:
1551 ui.write(
1551 ui.write(
1552 _(
1552 _(
1553 b"%s is ignored because of "
1553 b"%s is ignored because of "
1554 b"containing directory %s\n"
1554 b"containing directory %s\n"
1555 )
1555 )
1556 % (uipathfn(f), ignored)
1556 % (uipathfn(f), ignored)
1557 )
1557 )
1558 ignorefile, lineno, line = ignoredata
1558 ignorefile, lineno, line = ignoredata
1559 ui.write(
1559 ui.write(
1560 _(b"(ignore rule in %s, line %d: '%s')\n")
1560 _(b"(ignore rule in %s, line %d: '%s')\n")
1561 % (ignorefile, lineno, line)
1561 % (ignorefile, lineno, line)
1562 )
1562 )
1563 else:
1563 else:
1564 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1564 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1565
1565
1566
1566
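# Illustrative sketch, not part of the original source: when a file is not
# matched directly, debugignore above walks its parent directories and
# reports the first one the ignore matcher accepts.  Reduced to a helper
# over the same pathutil.finddirs iterator; the name is hypothetical.
def _example_first_ignored(path, ignore, finddirs=pathutil.finddirs):
    """Return ``path`` or its closest ignored ancestor, or None."""
    if ignore(path):
        return path
    for parent in finddirs(path):
        if ignore(parent):
            return parent
    return None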
1567 @command(
1567 @command(
1568 b'debugindex',
1568 b'debugindex',
1569 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1569 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1570 _(b'-c|-m|FILE'),
1570 _(b'-c|-m|FILE'),
1571 )
1571 )
1572 def debugindex(ui, repo, file_=None, **opts):
1572 def debugindex(ui, repo, file_=None, **opts):
1573 """dump index data for a storage primitive"""
1573 """dump index data for a storage primitive"""
1574 opts = pycompat.byteskwargs(opts)
1574 opts = pycompat.byteskwargs(opts)
1575 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1575 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1576
1576
1577 if ui.debugflag:
1577 if ui.debugflag:
1578 shortfn = hex
1578 shortfn = hex
1579 else:
1579 else:
1580 shortfn = short
1580 shortfn = short
1581
1581
1582 idlen = 12
1582 idlen = 12
1583 for i in store:
1583 for i in store:
1584 idlen = len(shortfn(store.node(i)))
1584 idlen = len(shortfn(store.node(i)))
1585 break
1585 break
1586
1586
1587 fm = ui.formatter(b'debugindex', opts)
1587 fm = ui.formatter(b'debugindex', opts)
1588 fm.plain(
1588 fm.plain(
1589 b' rev linkrev %s %s p2\n'
1589 b' rev linkrev %s %s p2\n'
1590 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1590 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1591 )
1591 )
1592
1592
1593 for rev in store:
1593 for rev in store:
1594 node = store.node(rev)
1594 node = store.node(rev)
1595 parents = store.parents(node)
1595 parents = store.parents(node)
1596
1596
1597 fm.startitem()
1597 fm.startitem()
1598 fm.write(b'rev', b'%6d ', rev)
1598 fm.write(b'rev', b'%6d ', rev)
1599 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1599 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1600 fm.write(b'node', b'%s ', shortfn(node))
1600 fm.write(b'node', b'%s ', shortfn(node))
1601 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1601 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1602 fm.write(b'p2', b'%s', shortfn(parents[1]))
1602 fm.write(b'p2', b'%s', shortfn(parents[1]))
1603 fm.plain(b'\n')
1603 fm.plain(b'\n')
1604
1604
1605 fm.end()
1605 fm.end()
1606
1606
1607
1607
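# Illustrative sketch, not part of the original source: debugindex above
# sizes the node columns from the first entry so that short (12 hex digit)
# and full (40 hex digit) ids line up the same way.  The same idea over a
# plain list of (node, p1, p2) hex byte strings; names are hypothetical and
# the linkrev column is omitted.
def _example_index_table(entries, default_idlen=12):
    idlen = len(entries[0][0]) if entries else default_idlen
    lines = [b' rev %s %s p2' % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))]
    for rev, (node, p1, p2) in enumerate(entries):
        lines.append(b'%6d %s %s %s' % (rev, node, p1, p2))
    return lines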
1608 @command(
1608 @command(
1609 b'debugindexdot',
1609 b'debugindexdot',
1610 cmdutil.debugrevlogopts,
1610 cmdutil.debugrevlogopts,
1611 _(b'-c|-m|FILE'),
1611 _(b'-c|-m|FILE'),
1612 optionalrepo=True,
1612 optionalrepo=True,
1613 )
1613 )
1614 def debugindexdot(ui, repo, file_=None, **opts):
1614 def debugindexdot(ui, repo, file_=None, **opts):
1615 """dump an index DAG as a graphviz dot file"""
1615 """dump an index DAG as a graphviz dot file"""
1616 opts = pycompat.byteskwargs(opts)
1616 opts = pycompat.byteskwargs(opts)
1617 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1617 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1618 ui.writenoi18n(b"digraph G {\n")
1618 ui.writenoi18n(b"digraph G {\n")
1619 for i in r:
1619 for i in r:
1620 node = r.node(i)
1620 node = r.node(i)
1621 pp = r.parents(node)
1621 pp = r.parents(node)
1622 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1622 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1623 if pp[1] != nullid:
1623 if pp[1] != nullid:
1624 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1624 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1625 ui.write(b"}\n")
1625 ui.write(b"}\n")
1626
1626
1627
1627
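# Illustrative sketch, not part of the original source: debugindexdot above
# emits one "parent -> child" edge per non-null parent, wrapped in a
# ``digraph G { ... }`` block.  The same rendering over a plain mapping from
# revision number to its two parent revisions; names are hypothetical.
def _example_dag_to_dot(parents_by_rev):
    """Render {rev: (p1rev, p2rev)} as graphviz dot text (nullrev = -1)."""
    lines = [b"digraph G {"]
    for rev in sorted(parents_by_rev):
        p1, p2 = parents_by_rev[rev]
        lines.append(b"\t%d -> %d" % (p1, rev))
        if p2 != nullrev:
            lines.append(b"\t%d -> %d" % (p2, rev))
    lines.append(b"}")
    return b"\n".join(lines)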
1628 @command(b'debugindexstats', [])
1628 @command(b'debugindexstats', [])
1629 def debugindexstats(ui, repo):
1629 def debugindexstats(ui, repo):
1630 """show stats related to the changelog index"""
1630 """show stats related to the changelog index"""
1631 repo.changelog.shortest(nullid, 1)
1631 repo.changelog.shortest(nullid, 1)
1632 index = repo.changelog.index
1632 index = repo.changelog.index
1633 if not util.safehasattr(index, b'stats'):
1633 if not util.safehasattr(index, b'stats'):
1634 raise error.Abort(_(b'debugindexstats only works with native code'))
1634 raise error.Abort(_(b'debugindexstats only works with native code'))
1635 for k, v in sorted(index.stats().items()):
1635 for k, v in sorted(index.stats().items()):
1636 ui.write(b'%s: %d\n' % (k, v))
1636 ui.write(b'%s: %d\n' % (k, v))
1637
1637
1638
1638
1639 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1639 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1640 def debuginstall(ui, **opts):
1640 def debuginstall(ui, **opts):
1641 """test Mercurial installation
1641 """test Mercurial installation
1642
1642
1643 Returns 0 on success.
1643 Returns 0 on success.
1644 """
1644 """
1645 opts = pycompat.byteskwargs(opts)
1645 opts = pycompat.byteskwargs(opts)
1646
1646
1647 problems = 0
1647 problems = 0
1648
1648
1649 fm = ui.formatter(b'debuginstall', opts)
1649 fm = ui.formatter(b'debuginstall', opts)
1650 fm.startitem()
1650 fm.startitem()
1651
1651
1652 # encoding might be unknown or wrong. don't translate these messages.
1652 # encoding might be unknown or wrong. don't translate these messages.
1653 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1653 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1654 err = None
1654 err = None
1655 try:
1655 try:
1656 codecs.lookup(pycompat.sysstr(encoding.encoding))
1656 codecs.lookup(pycompat.sysstr(encoding.encoding))
1657 except LookupError as inst:
1657 except LookupError as inst:
1658 err = stringutil.forcebytestr(inst)
1658 err = stringutil.forcebytestr(inst)
1659 problems += 1
1659 problems += 1
1660 fm.condwrite(
1660 fm.condwrite(
1661 err,
1661 err,
1662 b'encodingerror',
1662 b'encodingerror',
1663 b" %s\n (check that your locale is properly set)\n",
1663 b" %s\n (check that your locale is properly set)\n",
1664 err,
1664 err,
1665 )
1665 )
1666
1666
1667 # Python
1667 # Python
1668 pythonlib = None
1668 pythonlib = None
1669 if util.safehasattr(os, '__file__'):
1669 if util.safehasattr(os, '__file__'):
1670 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1670 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1671 elif getattr(sys, 'oxidized', False):
1671 elif getattr(sys, 'oxidized', False):
1672 pythonlib = pycompat.sysexecutable
1672 pythonlib = pycompat.sysexecutable
1673
1673
1674 fm.write(
1674 fm.write(
1675 b'pythonexe',
1675 b'pythonexe',
1676 _(b"checking Python executable (%s)\n"),
1676 _(b"checking Python executable (%s)\n"),
1677 pycompat.sysexecutable or _(b"unknown"),
1677 pycompat.sysexecutable or _(b"unknown"),
1678 )
1678 )
1679 fm.write(
1679 fm.write(
1680 b'pythonimplementation',
1680 b'pythonimplementation',
1681 _(b"checking Python implementation (%s)\n"),
1681 _(b"checking Python implementation (%s)\n"),
1682 pycompat.sysbytes(platform.python_implementation()),
1682 pycompat.sysbytes(platform.python_implementation()),
1683 )
1683 )
1684 fm.write(
1684 fm.write(
1685 b'pythonver',
1685 b'pythonver',
1686 _(b"checking Python version (%s)\n"),
1686 _(b"checking Python version (%s)\n"),
1687 (b"%d.%d.%d" % sys.version_info[:3]),
1687 (b"%d.%d.%d" % sys.version_info[:3]),
1688 )
1688 )
1689 fm.write(
1689 fm.write(
1690 b'pythonlib',
1690 b'pythonlib',
1691 _(b"checking Python lib (%s)...\n"),
1691 _(b"checking Python lib (%s)...\n"),
1692 pythonlib or _(b"unknown"),
1692 pythonlib or _(b"unknown"),
1693 )
1693 )
1694
1694
1695 try:
1695 try:
1696 from . import rustext
1696 from . import rustext
1697
1697
1698 rustext.__doc__ # trigger lazy import
1698 rustext.__doc__ # trigger lazy import
1699 except ImportError:
1699 except ImportError:
1700 rustext = None
1700 rustext = None
1701
1701
1702 security = set(sslutil.supportedprotocols)
1702 security = set(sslutil.supportedprotocols)
1703 if sslutil.hassni:
1703 if sslutil.hassni:
1704 security.add(b'sni')
1704 security.add(b'sni')
1705
1705
1706 fm.write(
1706 fm.write(
1707 b'pythonsecurity',
1707 b'pythonsecurity',
1708 _(b"checking Python security support (%s)\n"),
1708 _(b"checking Python security support (%s)\n"),
1709 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1709 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1710 )
1710 )
1711
1711
1712 # These are warnings, not errors. So don't increment problem count. This
1712 # These are warnings, not errors. So don't increment problem count. This
1713 # may change in the future.
1713 # may change in the future.
1714 if b'tls1.2' not in security:
1714 if b'tls1.2' not in security:
1715 fm.plain(
1715 fm.plain(
1716 _(
1716 _(
1717 b' TLS 1.2 not supported by Python install; '
1717 b' TLS 1.2 not supported by Python install; '
1718 b'network connections lack modern security\n'
1718 b'network connections lack modern security\n'
1719 )
1719 )
1720 )
1720 )
1721 if b'sni' not in security:
1721 if b'sni' not in security:
1722 fm.plain(
1722 fm.plain(
1723 _(
1723 _(
1724 b' SNI not supported by Python install; may have '
1724 b' SNI not supported by Python install; may have '
1725 b'connectivity issues with some servers\n'
1725 b'connectivity issues with some servers\n'
1726 )
1726 )
1727 )
1727 )
1728
1728
1729 fm.plain(
1729 fm.plain(
1730 _(
1730 _(
1731 b"checking Rust extensions (%s)\n"
1731 b"checking Rust extensions (%s)\n"
1732 % (b'missing' if rustext is None else b'installed')
1732 % (b'missing' if rustext is None else b'installed')
1733 ),
1733 ),
1734 )
1734 )
1735
1735
1736 # TODO print CA cert info
1736 # TODO print CA cert info
1737
1737
1738 # hg version
1738 # hg version
1739 hgver = util.version()
1739 hgver = util.version()
1740 fm.write(
1740 fm.write(
1741 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1741 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1742 )
1742 )
1743 fm.write(
1743 fm.write(
1744 b'hgverextra',
1744 b'hgverextra',
1745 _(b"checking Mercurial custom build (%s)\n"),
1745 _(b"checking Mercurial custom build (%s)\n"),
1746 b'+'.join(hgver.split(b'+')[1:]),
1746 b'+'.join(hgver.split(b'+')[1:]),
1747 )
1747 )
1748
1748
1749 # compiled modules
1749 # compiled modules
1750 hgmodules = None
1750 hgmodules = None
1751 if util.safehasattr(sys.modules[__name__], '__file__'):
1751 if util.safehasattr(sys.modules[__name__], '__file__'):
1752 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1752 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1753 elif getattr(sys, 'oxidized', False):
1753 elif getattr(sys, 'oxidized', False):
1754 hgmodules = pycompat.sysexecutable
1754 hgmodules = pycompat.sysexecutable
1755
1755
1756 fm.write(
1756 fm.write(
1757 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1757 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1758 )
1758 )
1759 fm.write(
1759 fm.write(
1760 b'hgmodules',
1760 b'hgmodules',
1761 _(b"checking installed modules (%s)...\n"),
1761 _(b"checking installed modules (%s)...\n"),
1762 hgmodules or _(b"unknown"),
1762 hgmodules or _(b"unknown"),
1763 )
1763 )
1764
1764
1765 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1765 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1766 rustext = rustandc # for now, that's the only case
1766 rustext = rustandc # for now, that's the only case
1767 cext = policy.policy in (b'c', b'allow') or rustandc
1767 cext = policy.policy in (b'c', b'allow') or rustandc
1768 nopure = cext or rustext
1768 nopure = cext or rustext
1769 if nopure:
1769 if nopure:
1770 err = None
1770 err = None
1771 try:
1771 try:
1772 if cext:
1772 if cext:
1773 from .cext import ( # pytype: disable=import-error
1773 from .cext import ( # pytype: disable=import-error
1774 base85,
1774 base85,
1775 bdiff,
1775 bdiff,
1776 mpatch,
1776 mpatch,
1777 osutil,
1777 osutil,
1778 )
1778 )
1779
1779
1780 # quiet pyflakes
1780 # quiet pyflakes
1781 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1781 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1782 if rustext:
1782 if rustext:
1783 from .rustext import ( # pytype: disable=import-error
1783 from .rustext import ( # pytype: disable=import-error
1784 ancestor,
1784 ancestor,
1785 dirstate,
1785 dirstate,
1786 )
1786 )
1787
1787
1788 dir(ancestor), dir(dirstate) # quiet pyflakes
1788 dir(ancestor), dir(dirstate) # quiet pyflakes
1789 except Exception as inst:
1789 except Exception as inst:
1790 err = stringutil.forcebytestr(inst)
1790 err = stringutil.forcebytestr(inst)
1791 problems += 1
1791 problems += 1
1792 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1792 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1793
1793
1794 compengines = util.compengines._engines.values()
1794 compengines = util.compengines._engines.values()
1795 fm.write(
1795 fm.write(
1796 b'compengines',
1796 b'compengines',
1797 _(b'checking registered compression engines (%s)\n'),
1797 _(b'checking registered compression engines (%s)\n'),
1798 fm.formatlist(
1798 fm.formatlist(
1799 sorted(e.name() for e in compengines),
1799 sorted(e.name() for e in compengines),
1800 name=b'compengine',
1800 name=b'compengine',
1801 fmt=b'%s',
1801 fmt=b'%s',
1802 sep=b', ',
1802 sep=b', ',
1803 ),
1803 ),
1804 )
1804 )
1805 fm.write(
1805 fm.write(
1806 b'compenginesavail',
1806 b'compenginesavail',
1807 _(b'checking available compression engines (%s)\n'),
1807 _(b'checking available compression engines (%s)\n'),
1808 fm.formatlist(
1808 fm.formatlist(
1809 sorted(e.name() for e in compengines if e.available()),
1809 sorted(e.name() for e in compengines if e.available()),
1810 name=b'compengine',
1810 name=b'compengine',
1811 fmt=b'%s',
1811 fmt=b'%s',
1812 sep=b', ',
1812 sep=b', ',
1813 ),
1813 ),
1814 )
1814 )
1815 wirecompengines = compression.compengines.supportedwireengines(
1815 wirecompengines = compression.compengines.supportedwireengines(
1816 compression.SERVERROLE
1816 compression.SERVERROLE
1817 )
1817 )
1818 fm.write(
1818 fm.write(
1819 b'compenginesserver',
1819 b'compenginesserver',
1820 _(
1820 _(
1821 b'checking available compression engines '
1821 b'checking available compression engines '
1822 b'for wire protocol (%s)\n'
1822 b'for wire protocol (%s)\n'
1823 ),
1823 ),
1824 fm.formatlist(
1824 fm.formatlist(
1825 [e.name() for e in wirecompengines if e.wireprotosupport()],
1825 [e.name() for e in wirecompengines if e.wireprotosupport()],
1826 name=b'compengine',
1826 name=b'compengine',
1827 fmt=b'%s',
1827 fmt=b'%s',
1828 sep=b', ',
1828 sep=b', ',
1829 ),
1829 ),
1830 )
1830 )
1831 re2 = b'missing'
1831 re2 = b'missing'
1832 if util._re2:
1832 if util._re2:
1833 re2 = b'available'
1833 re2 = b'available'
1834 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1834 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1835 fm.data(re2=bool(util._re2))
1835 fm.data(re2=bool(util._re2))
1836
1836
1837 # templates
1837 # templates
1838 p = templater.templatedir()
1838 p = templater.templatedir()
1839 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1839 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1840 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1840 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1841 if p:
1841 if p:
1842 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1842 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1843 if m:
1843 if m:
1844 # template found, check if it is working
1844 # template found, check if it is working
1845 err = None
1845 err = None
1846 try:
1846 try:
1847 templater.templater.frommapfile(m)
1847 templater.templater.frommapfile(m)
1848 except Exception as inst:
1848 except Exception as inst:
1849 err = stringutil.forcebytestr(inst)
1849 err = stringutil.forcebytestr(inst)
1850 p = None
1850 p = None
1851 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1851 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1852 else:
1852 else:
1853 p = None
1853 p = None
1854 fm.condwrite(
1854 fm.condwrite(
1855 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1855 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1856 )
1856 )
1857 fm.condwrite(
1857 fm.condwrite(
1858 not m,
1858 not m,
1859 b'defaulttemplatenotfound',
1859 b'defaulttemplatenotfound',
1860 _(b" template '%s' not found\n"),
1860 _(b" template '%s' not found\n"),
1861 b"default",
1861 b"default",
1862 )
1862 )
1863 if not p:
1863 if not p:
1864 problems += 1
1864 problems += 1
1865 fm.condwrite(
1865 fm.condwrite(
1866 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1866 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1867 )
1867 )
1868
1868
1869 # editor
1869 # editor
1870 editor = ui.geteditor()
1870 editor = ui.geteditor()
1871 editor = util.expandpath(editor)
1871 editor = util.expandpath(editor)
1872 editorbin = procutil.shellsplit(editor)[0]
1872 editorbin = procutil.shellsplit(editor)[0]
1873 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1873 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1874 cmdpath = procutil.findexe(editorbin)
1874 cmdpath = procutil.findexe(editorbin)
1875 fm.condwrite(
1875 fm.condwrite(
1876 not cmdpath and editor == b'vi',
1876 not cmdpath and editor == b'vi',
1877 b'vinotfound',
1877 b'vinotfound',
1878 _(
1878 _(
1879 b" No commit editor set and can't find %s in PATH\n"
1879 b" No commit editor set and can't find %s in PATH\n"
1880 b" (specify a commit editor in your configuration"
1880 b" (specify a commit editor in your configuration"
1881 b" file)\n"
1881 b" file)\n"
1882 ),
1882 ),
1883 not cmdpath and editor == b'vi' and editorbin,
1883 not cmdpath and editor == b'vi' and editorbin,
1884 )
1884 )
1885 fm.condwrite(
1885 fm.condwrite(
1886 not cmdpath and editor != b'vi',
1886 not cmdpath and editor != b'vi',
1887 b'editornotfound',
1887 b'editornotfound',
1888 _(
1888 _(
1889 b" Can't find editor '%s' in PATH\n"
1889 b" Can't find editor '%s' in PATH\n"
1890 b" (specify a commit editor in your configuration"
1890 b" (specify a commit editor in your configuration"
1891 b" file)\n"
1891 b" file)\n"
1892 ),
1892 ),
1893 not cmdpath and editorbin,
1893 not cmdpath and editorbin,
1894 )
1894 )
1895 if not cmdpath and editor != b'vi':
1895 if not cmdpath and editor != b'vi':
1896 problems += 1
1896 problems += 1
1897
1897
1898 # check username
1898 # check username
1899 username = None
1899 username = None
1900 err = None
1900 err = None
1901 try:
1901 try:
1902 username = ui.username()
1902 username = ui.username()
1903 except error.Abort as e:
1903 except error.Abort as e:
1904 err = e.message
1904 err = e.message
1905 problems += 1
1905 problems += 1
1906
1906
1907 fm.condwrite(
1907 fm.condwrite(
1908 username, b'username', _(b"checking username (%s)\n"), username
1908 username, b'username', _(b"checking username (%s)\n"), username
1909 )
1909 )
1910 fm.condwrite(
1910 fm.condwrite(
1911 err,
1911 err,
1912 b'usernameerror',
1912 b'usernameerror',
1913 _(
1913 _(
1914 b"checking username...\n %s\n"
1914 b"checking username...\n %s\n"
1915 b" (specify a username in your configuration file)\n"
1915 b" (specify a username in your configuration file)\n"
1916 ),
1916 ),
1917 err,
1917 err,
1918 )
1918 )
1919
1919
1920 for name, mod in extensions.extensions():
1920 for name, mod in extensions.extensions():
1921 handler = getattr(mod, 'debuginstall', None)
1921 handler = getattr(mod, 'debuginstall', None)
1922 if handler is not None:
1922 if handler is not None:
1923 problems += handler(ui, fm)
1923 problems += handler(ui, fm)
1924
1924
1925 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1925 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1926 if not problems:
1926 if not problems:
1927 fm.data(problems=problems)
1927 fm.data(problems=problems)
1928 fm.condwrite(
1928 fm.condwrite(
1929 problems,
1929 problems,
1930 b'problems',
1930 b'problems',
1931 _(b"%d problems detected, please check your install!\n"),
1931 _(b"%d problems detected, please check your install!\n"),
1932 problems,
1932 problems,
1933 )
1933 )
1934 fm.end()
1934 fm.end()
1935
1935
1936 return problems
1936 return problems
1937
1937
1938
1938
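# Illustrative sketch, not part of the original source: most probes in
# debuginstall above follow the same shape -- run a check, capture the
# failure text, bump a shared problem counter, and only print the error when
# there is one.  That shape extracted into a hypothetical helper.
def _example_run_checks(checks, write):
    """Run ``(label, probe)`` pairs and return the number of failures."""
    problems = 0
    for label, probe in checks:
        err = None
        try:
            probe()
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        if err:
            write(b"checking %s... %s\n" % (label, err))
        else:
            write(b"checking %s... ok\n" % label)
    return problems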
1939 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1939 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1940 def debugknown(ui, repopath, *ids, **opts):
1940 def debugknown(ui, repopath, *ids, **opts):
1941 """test whether node ids are known to a repo
1941 """test whether node ids are known to a repo
1942
1942
1943 Every ID must be a full-length hex node id string. Returns a list of 0s
1943 Every ID must be a full-length hex node id string. Returns a list of 0s
1944 and 1s indicating unknown/known.
1944 and 1s indicating unknown/known.
1945 """
1945 """
1946 opts = pycompat.byteskwargs(opts)
1946 opts = pycompat.byteskwargs(opts)
1947 repo = hg.peer(ui, opts, repopath)
1947 repo = hg.peer(ui, opts, repopath)
1948 if not repo.capable(b'known'):
1948 if not repo.capable(b'known'):
1949 raise error.Abort(b"known() not supported by target repository")
1949 raise error.Abort(b"known() not supported by target repository")
1950 flags = repo.known([bin(s) for s in ids])
1950 flags = repo.known([bin(s) for s in ids])
1951 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1951 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1952
1952
1953
1953
1954 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1954 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1955 def debuglabelcomplete(ui, repo, *args):
1955 def debuglabelcomplete(ui, repo, *args):
1956 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1956 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1957 debugnamecomplete(ui, repo, *args)
1957 debugnamecomplete(ui, repo, *args)
1958
1958
1959
1959
1960 @command(
1960 @command(
1961 b'debuglocks',
1961 b'debuglocks',
1962 [
1962 [
1963 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
1963 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
1964 (
1964 (
1965 b'W',
1965 b'W',
1966 b'force-free-wlock',
1966 b'force-free-wlock',
1967 None,
1967 None,
1968 _(b'free the working state lock (DANGEROUS)'),
1968 _(b'free the working state lock (DANGEROUS)'),
1969 ),
1969 ),
1970 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1970 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1971 (
1971 (
1972 b'S',
1972 b'S',
1973 b'set-wlock',
1973 b'set-wlock',
1974 None,
1974 None,
1975 _(b'set the working state lock until stopped'),
1975 _(b'set the working state lock until stopped'),
1976 ),
1976 ),
1977 ],
1977 ],
1978 _(b'[OPTION]...'),
1978 _(b'[OPTION]...'),
1979 )
1979 )
1980 def debuglocks(ui, repo, **opts):
1980 def debuglocks(ui, repo, **opts):
1981 """show or modify state of locks
1981 """show or modify state of locks
1982
1982
1983 By default, this command will show which locks are held. This
1983 By default, this command will show which locks are held. This
1984 includes the user and process holding the lock, the amount of time
1984 includes the user and process holding the lock, the amount of time
1985 the lock has been held, and the machine name where the process is
1985 the lock has been held, and the machine name where the process is
1986 running if it's not local.
1986 running if it's not local.
1987
1987
1988 Locks protect the integrity of Mercurial's data, so should be
1988 Locks protect the integrity of Mercurial's data, so should be
1989 treated with care. System crashes or other interruptions may cause
1989 treated with care. System crashes or other interruptions may cause
1990 locks to not be properly released, though Mercurial will usually
1990 locks to not be properly released, though Mercurial will usually
1991 detect and remove such stale locks automatically.
1991 detect and remove such stale locks automatically.
1992
1992
1993 However, detecting stale locks may not always be possible (for
1993 However, detecting stale locks may not always be possible (for
1994 instance, on a shared filesystem). Removing locks may also be
1994 instance, on a shared filesystem). Removing locks may also be
1995 blocked by filesystem permissions.
1995 blocked by filesystem permissions.
1996
1996
1997 Setting a lock will prevent other commands from changing the data.
1997 Setting a lock will prevent other commands from changing the data.
1998 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1998 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1999 The set locks are removed when the command exits.
1999 The set locks are removed when the command exits.
2000
2000
2001 Returns 0 if no locks are held.
2001 Returns 0 if no locks are held.
2002
2002
2003 """
2003 """
2004
2004
2005 if opts.get('force_free_lock'):
2005 if opts.get('force_free_lock'):
2006 repo.svfs.unlink(b'lock')
2006 repo.svfs.unlink(b'lock')
2007 if opts.get('force_free_wlock'):
2007 if opts.get('force_free_wlock'):
2008 repo.vfs.unlink(b'wlock')
2008 repo.vfs.unlink(b'wlock')
2009 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2009 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2010 return 0
2010 return 0
2011
2011
2012 locks = []
2012 locks = []
2013 try:
2013 try:
2014 if opts.get('set_wlock'):
2014 if opts.get('set_wlock'):
2015 try:
2015 try:
2016 locks.append(repo.wlock(False))
2016 locks.append(repo.wlock(False))
2017 except error.LockHeld:
2017 except error.LockHeld:
2018 raise error.Abort(_(b'wlock is already held'))
2018 raise error.Abort(_(b'wlock is already held'))
2019 if opts.get('set_lock'):
2019 if opts.get('set_lock'):
2020 try:
2020 try:
2021 locks.append(repo.lock(False))
2021 locks.append(repo.lock(False))
2022 except error.LockHeld:
2022 except error.LockHeld:
2023 raise error.Abort(_(b'lock is already held'))
2023 raise error.Abort(_(b'lock is already held'))
2024 if len(locks):
2024 if len(locks):
2025 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2025 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2026 return 0
2026 return 0
2027 finally:
2027 finally:
2028 release(*locks)
2028 release(*locks)
2029
2029
2030 now = time.time()
2030 now = time.time()
2031 held = 0
2031 held = 0
2032
2032
2033 def report(vfs, name, method):
2033 def report(vfs, name, method):
2034 # this causes stale locks to get reaped for more accurate reporting
2034 # this causes stale locks to get reaped for more accurate reporting
2035 try:
2035 try:
2036 l = method(False)
2036 l = method(False)
2037 except error.LockHeld:
2037 except error.LockHeld:
2038 l = None
2038 l = None
2039
2039
2040 if l:
2040 if l:
2041 l.release()
2041 l.release()
2042 else:
2042 else:
2043 try:
2043 try:
2044 st = vfs.lstat(name)
2044 st = vfs.lstat(name)
2045 age = now - st[stat.ST_MTIME]
2045 age = now - st[stat.ST_MTIME]
2046 user = util.username(st.st_uid)
2046 user = util.username(st.st_uid)
2047 locker = vfs.readlock(name)
2047 locker = vfs.readlock(name)
2048 if b":" in locker:
2048 if b":" in locker:
2049 host, pid = locker.split(b':')
2049 host, pid = locker.split(b':')
2050 if host == socket.gethostname():
2050 if host == socket.gethostname():
2051 locker = b'user %s, process %s' % (user or b'None', pid)
2051 locker = b'user %s, process %s' % (user or b'None', pid)
2052 else:
2052 else:
2053 locker = b'user %s, process %s, host %s' % (
2053 locker = b'user %s, process %s, host %s' % (
2054 user or b'None',
2054 user or b'None',
2055 pid,
2055 pid,
2056 host,
2056 host,
2057 )
2057 )
2058 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2058 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2059 return 1
2059 return 1
2060 except OSError as e:
2060 except OSError as e:
2061 if e.errno != errno.ENOENT:
2061 if e.errno != errno.ENOENT:
2062 raise
2062 raise
2063
2063
2064 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2064 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2065 return 0
2065 return 0
2066
2066
2067 held += report(repo.svfs, b"lock", repo.lock)
2067 held += report(repo.svfs, b"lock", repo.lock)
2068 held += report(repo.vfs, b"wlock", repo.wlock)
2068 held += report(repo.vfs, b"wlock", repo.wlock)
2069
2069
2070 return held
2070 return held
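# Editor's sketch -- not part of the original module, kept commented out. It
# shows how a script could briefly hold both locks, mirroring --set-wlock and
# --set-lock above; the helper name is hypothetical and a `repo` object is
# assumed. As in debuglocks, the working-state lock is taken before the store
# lock.
#
#   def _hold_both_locks(repo):
#       with repo.wlock(False):
#           with repo.lock(False):
#               pass  # both locks are released when the with-blocks exit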
2071
2071
2072
2072
2073 @command(
2073 @command(
2074 b'debugmanifestfulltextcache',
2074 b'debugmanifestfulltextcache',
2075 [
2075 [
2076 (b'', b'clear', False, _(b'clear the cache')),
2076 (b'', b'clear', False, _(b'clear the cache')),
2077 (
2077 (
2078 b'a',
2078 b'a',
2079 b'add',
2079 b'add',
2080 [],
2080 [],
2081 _(b'add the given manifest nodes to the cache'),
2081 _(b'add the given manifest nodes to the cache'),
2082 _(b'NODE'),
2082 _(b'NODE'),
2083 ),
2083 ),
2084 ],
2084 ],
2085 b'',
2085 b'',
2086 )
2086 )
2087 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2087 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2088 """show, clear or amend the contents of the manifest fulltext cache"""
2088 """show, clear or amend the contents of the manifest fulltext cache"""
2089
2089
2090 def getcache():
2090 def getcache():
2091 r = repo.manifestlog.getstorage(b'')
2091 r = repo.manifestlog.getstorage(b'')
2092 try:
2092 try:
2093 return r._fulltextcache
2093 return r._fulltextcache
2094 except AttributeError:
2094 except AttributeError:
2095 msg = _(
2095 msg = _(
2096 b"Current revlog implementation doesn't appear to have a "
2096 b"Current revlog implementation doesn't appear to have a "
2097 b"manifest fulltext cache\n"
2097 b"manifest fulltext cache\n"
2098 )
2098 )
2099 raise error.Abort(msg)
2099 raise error.Abort(msg)
2100
2100
2101 if opts.get('clear'):
2101 if opts.get('clear'):
2102 with repo.wlock():
2102 with repo.wlock():
2103 cache = getcache()
2103 cache = getcache()
2104 cache.clear(clear_persisted_data=True)
2104 cache.clear(clear_persisted_data=True)
2105 return
2105 return
2106
2106
2107 if add:
2107 if add:
2108 with repo.wlock():
2108 with repo.wlock():
2109 m = repo.manifestlog
2109 m = repo.manifestlog
2110 store = m.getstorage(b'')
2110 store = m.getstorage(b'')
2111 for n in add:
2111 for n in add:
2112 try:
2112 try:
2113 manifest = m[store.lookup(n)]
2113 manifest = m[store.lookup(n)]
2114 except error.LookupError as e:
2114 except error.LookupError as e:
2115 raise error.Abort(e, hint=b"Check your manifest node id")
2115 raise error.Abort(e, hint=b"Check your manifest node id")
2116 manifest.read() # stores revision in cache too
2116 manifest.read() # stores revision in cache too
2117 return
2117 return
2118
2118
2119 cache = getcache()
2119 cache = getcache()
2120 if not len(cache):
2120 if not len(cache):
2121 ui.write(_(b'cache empty\n'))
2121 ui.write(_(b'cache empty\n'))
2122 else:
2122 else:
2123 ui.write(
2123 ui.write(
2124 _(
2124 _(
2125 b'cache contains %d manifest entries, in order of most to '
2125 b'cache contains %d manifest entries, in order of most to '
2126 b'least recent:\n'
2126 b'least recent:\n'
2127 )
2127 )
2128 % (len(cache),)
2128 % (len(cache),)
2129 )
2129 )
2130 totalsize = 0
2130 totalsize = 0
2131 for nodeid in cache:
2131 for nodeid in cache:
2132 # Use cache.peek to not update the LRU order
2132 # Use cache.peek to not update the LRU order
2133 data = cache.peek(nodeid)
2133 data = cache.peek(nodeid)
2134 size = len(data)
2134 size = len(data)
2135 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2135 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2136 ui.write(
2136 ui.write(
2137 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2137 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2138 )
2138 )
2139 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2139 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2140 ui.write(
2140 ui.write(
2141 _(b'total cache data size %s, on-disk %s\n')
2141 _(b'total cache data size %s, on-disk %s\n')
2142 % (util.bytecount(totalsize), util.bytecount(ondisk))
2142 % (util.bytecount(totalsize), util.bytecount(ondisk))
2143 )
2143 )
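# Editor's note -- illustrative invocations of the three modes defined by the
# option table above (<node> stands for a manifest node id):
#
#   hg debugmanifestfulltextcache              # list cached entries and sizes
#   hg debugmanifestfulltextcache --clear      # drop the persisted cache
#   hg debugmanifestfulltextcache --add <node> # cache one manifest fulltext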
2144
2144
2145
2145
2146 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2146 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2147 def debugmergestate(ui, repo, *args, **opts):
2147 def debugmergestate(ui, repo, *args, **opts):
2148 """print merge state
2148 """print merge state
2149
2149
2150 Use --verbose to print out information about whether v1 or v2 merge state
2150 Use --verbose to print out information about whether v1 or v2 merge state
2151 was chosen."""
2151 was chosen."""
2152
2152
2153 if ui.verbose:
2153 if ui.verbose:
2154 ms = mergestatemod.mergestate(repo)
2154 ms = mergestatemod.mergestate(repo)
2155
2155
2156 # sort so that reasonable information is on top
2156 # sort so that reasonable information is on top
2157 v1records = ms._readrecordsv1()
2157 v1records = ms._readrecordsv1()
2158 v2records = ms._readrecordsv2()
2158 v2records = ms._readrecordsv2()
2159
2159
2160 if not v1records and not v2records:
2160 if not v1records and not v2records:
2161 pass
2161 pass
2162 elif not v2records:
2162 elif not v2records:
2163 ui.writenoi18n(b'no version 2 merge state\n')
2163 ui.writenoi18n(b'no version 2 merge state\n')
2164 elif ms._v1v2match(v1records, v2records):
2164 elif ms._v1v2match(v1records, v2records):
2165 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2165 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2166 else:
2166 else:
2167 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2167 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2168
2168
2169 opts = pycompat.byteskwargs(opts)
2169 opts = pycompat.byteskwargs(opts)
2170 if not opts[b'template']:
2170 if not opts[b'template']:
2171 opts[b'template'] = (
2171 opts[b'template'] = (
2172 b'{if(commits, "", "no merge state found\n")}'
2172 b'{if(commits, "", "no merge state found\n")}'
2173 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2173 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2174 b'{files % "file: {path} (state \\"{state}\\")\n'
2174 b'{files % "file: {path} (state \\"{state}\\")\n'
2175 b'{if(local_path, "'
2175 b'{if(local_path, "'
2176 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2176 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2177 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2177 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2178 b' other path: {other_path} (node {other_node})\n'
2178 b' other path: {other_path} (node {other_node})\n'
2179 b'")}'
2179 b'")}'
2180 b'{if(rename_side, "'
2180 b'{if(rename_side, "'
2181 b' rename side: {rename_side}\n'
2181 b' rename side: {rename_side}\n'
2182 b' renamed path: {renamed_path}\n'
2182 b' renamed path: {renamed_path}\n'
2183 b'")}'
2183 b'")}'
2184 b'{extras % " extra: {key} = {value}\n"}'
2184 b'{extras % " extra: {key} = {value}\n"}'
2185 b'"}'
2185 b'"}'
2186 b'{extras % "extra: {file} ({key} = {value})\n"}'
2186 b'{extras % "extra: {file} ({key} = {value})\n"}'
2187 )
2187 )
2188
2188
2189 ms = mergestatemod.mergestate.read(repo)
2189 ms = mergestatemod.mergestate.read(repo)
2190
2190
2191 fm = ui.formatter(b'debugmergestate', opts)
2191 fm = ui.formatter(b'debugmergestate', opts)
2192 fm.startitem()
2192 fm.startitem()
2193
2193
2194 fm_commits = fm.nested(b'commits')
2194 fm_commits = fm.nested(b'commits')
2195 if ms.active():
2195 if ms.active():
2196 for name, node, label_index in (
2196 for name, node, label_index in (
2197 (b'local', ms.local, 0),
2197 (b'local', ms.local, 0),
2198 (b'other', ms.other, 1),
2198 (b'other', ms.other, 1),
2199 ):
2199 ):
2200 fm_commits.startitem()
2200 fm_commits.startitem()
2201 fm_commits.data(name=name)
2201 fm_commits.data(name=name)
2202 fm_commits.data(node=hex(node))
2202 fm_commits.data(node=hex(node))
2203 if ms._labels and len(ms._labels) > label_index:
2203 if ms._labels and len(ms._labels) > label_index:
2204 fm_commits.data(label=ms._labels[label_index])
2204 fm_commits.data(label=ms._labels[label_index])
2205 fm_commits.end()
2205 fm_commits.end()
2206
2206
2207 fm_files = fm.nested(b'files')
2207 fm_files = fm.nested(b'files')
2208 if ms.active():
2208 if ms.active():
2209 for f in ms:
2209 for f in ms:
2210 fm_files.startitem()
2210 fm_files.startitem()
2211 fm_files.data(path=f)
2211 fm_files.data(path=f)
2212 state = ms._state[f]
2212 state = ms._state[f]
2213 fm_files.data(state=state[0])
2213 fm_files.data(state=state[0])
2214 if state[0] in (
2214 if state[0] in (
2215 mergestatemod.MERGE_RECORD_UNRESOLVED,
2215 mergestatemod.MERGE_RECORD_UNRESOLVED,
2216 mergestatemod.MERGE_RECORD_RESOLVED,
2216 mergestatemod.MERGE_RECORD_RESOLVED,
2217 ):
2217 ):
2218 fm_files.data(local_key=state[1])
2218 fm_files.data(local_key=state[1])
2219 fm_files.data(local_path=state[2])
2219 fm_files.data(local_path=state[2])
2220 fm_files.data(ancestor_path=state[3])
2220 fm_files.data(ancestor_path=state[3])
2221 fm_files.data(ancestor_node=state[4])
2221 fm_files.data(ancestor_node=state[4])
2222 fm_files.data(other_path=state[5])
2222 fm_files.data(other_path=state[5])
2223 fm_files.data(other_node=state[6])
2223 fm_files.data(other_node=state[6])
2224 fm_files.data(local_flags=state[7])
2224 fm_files.data(local_flags=state[7])
2225 elif state[0] in (
2225 elif state[0] in (
2226 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2226 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2227 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2227 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2228 ):
2228 ):
2229 fm_files.data(renamed_path=state[1])
2229 fm_files.data(renamed_path=state[1])
2230 fm_files.data(rename_side=state[2])
2230 fm_files.data(rename_side=state[2])
2231 fm_extras = fm_files.nested(b'extras')
2231 fm_extras = fm_files.nested(b'extras')
2232 for k, v in sorted(ms.extras(f).items()):
2232 for k, v in sorted(ms.extras(f).items()):
2233 fm_extras.startitem()
2233 fm_extras.startitem()
2234 fm_extras.data(key=k)
2234 fm_extras.data(key=k)
2235 fm_extras.data(value=v)
2235 fm_extras.data(value=v)
2236 fm_extras.end()
2236 fm_extras.end()
2237
2237
2238 fm_files.end()
2238 fm_files.end()
2239
2239
2240 fm_extras = fm.nested(b'extras')
2240 fm_extras = fm.nested(b'extras')
2241 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2241 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2242 if f in ms:
2242 if f in ms:
2243 # If file is in mergestate, we have already processed its extras
2243 # If file is in mergestate, we have already processed its extras
2244 continue
2244 continue
2245 for k, v in pycompat.iteritems(d):
2245 for k, v in pycompat.iteritems(d):
2246 fm_extras.startitem()
2246 fm_extras.startitem()
2247 fm_extras.data(file=f)
2247 fm_extras.data(file=f)
2248 fm_extras.data(key=k)
2248 fm_extras.data(key=k)
2249 fm_extras.data(value=v)
2249 fm_extras.data(value=v)
2250 fm_extras.end()
2250 fm_extras.end()
2251
2251
2252 fm.end()
2252 fm.end()
2253
2253
2254
2254
2255 @command(b'debugnamecomplete', [], _(b'NAME...'))
2255 @command(b'debugnamecomplete', [], _(b'NAME...'))
2256 def debugnamecomplete(ui, repo, *args):
2256 def debugnamecomplete(ui, repo, *args):
2257 '''complete "names" - tags, open branch names, bookmark names'''
2257 '''complete "names" - tags, open branch names, bookmark names'''
2258
2258
2259 names = set()
2259 names = set()
2260 # since we previously only listed open branches, we will handle that
2260 # since we previously only listed open branches, we will handle that
2261 # specially (after this for loop)
2261 # specially (after this for loop)
2262 for name, ns in pycompat.iteritems(repo.names):
2262 for name, ns in pycompat.iteritems(repo.names):
2263 if name != b'branches':
2263 if name != b'branches':
2264 names.update(ns.listnames(repo))
2264 names.update(ns.listnames(repo))
2265 names.update(
2265 names.update(
2266 tag
2266 tag
2267 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2267 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2268 if not closed
2268 if not closed
2269 )
2269 )
2270 completions = set()
2270 completions = set()
2271 if not args:
2271 if not args:
2272 args = [b'']
2272 args = [b'']
2273 for a in args:
2273 for a in args:
2274 completions.update(n for n in names if n.startswith(a))
2274 completions.update(n for n in names if n.startswith(a))
2275 ui.write(b'\n'.join(sorted(completions)))
2275 ui.write(b'\n'.join(sorted(completions)))
2276 ui.write(b'\n')
2276 ui.write(b'\n')
2277
2277
2278
2278
2279 @command(
2279 @command(
2280 b'debugnodemap',
2280 b'debugnodemap',
2281 [
2281 [
2282 (
2282 (
2283 b'',
2283 b'',
2284 b'dump-new',
2284 b'dump-new',
2285 False,
2285 False,
2286 _(b'write a (new) persistent binary nodemap on stdout'),
2286 _(b'write a (new) persistent binary nodemap on stdout'),
2287 ),
2287 ),
2288 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2288 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2289 (
2289 (
2290 b'',
2290 b'',
2291 b'check',
2291 b'check',
2292 False,
2292 False,
2293 _(b'check that the data on disk are correct.'),
2293 _(b'check that the data on disk are correct.'),
2294 ),
2294 ),
2295 (
2295 (
2296 b'',
2296 b'',
2297 b'metadata',
2297 b'metadata',
2298 False,
2298 False,
2299 _(b'display the on-disk metadata for the nodemap'),
2299 _(b'display the on-disk metadata for the nodemap'),
2300 ),
2300 ),
2301 ],
2301 ],
2302 )
2302 )
2303 def debugnodemap(ui, repo, **opts):
2303 def debugnodemap(ui, repo, **opts):
2304 """write and inspect on disk nodemap"""
2304 """write and inspect on disk nodemap"""
2305 if opts['dump_new']:
2305 if opts['dump_new']:
2306 unfi = repo.unfiltered()
2306 unfi = repo.unfiltered()
2307 cl = unfi.changelog
2307 cl = unfi.changelog
2308 if util.safehasattr(cl.index, "nodemap_data_all"):
2308 if util.safehasattr(cl.index, "nodemap_data_all"):
2309 data = cl.index.nodemap_data_all()
2309 data = cl.index.nodemap_data_all()
2310 else:
2310 else:
2311 data = nodemap.persistent_data(cl.index)
2311 data = nodemap.persistent_data(cl.index)
2312 ui.write(data)
2312 ui.write(data)
2313 elif opts['dump_disk']:
2313 elif opts['dump_disk']:
2314 unfi = repo.unfiltered()
2314 unfi = repo.unfiltered()
2315 cl = unfi.changelog
2315 cl = unfi.changelog
2316 nm_data = nodemap.persisted_data(cl)
2316 nm_data = nodemap.persisted_data(cl)
2317 if nm_data is not None:
2317 if nm_data is not None:
2318 docket, data = nm_data
2318 docket, data = nm_data
2319 ui.write(data[:])
2319 ui.write(data[:])
2320 elif opts['check']:
2320 elif opts['check']:
2321 unfi = repo.unfiltered()
2321 unfi = repo.unfiltered()
2322 cl = unfi.changelog
2322 cl = unfi.changelog
2323 nm_data = nodemap.persisted_data(cl)
2323 nm_data = nodemap.persisted_data(cl)
2324 if nm_data is not None:
2324 if nm_data is not None:
2325 docket, data = nm_data
2325 docket, data = nm_data
2326 return nodemap.check_data(ui, cl.index, data)
2326 return nodemap.check_data(ui, cl.index, data)
2327 elif opts['metadata']:
2327 elif opts['metadata']:
2328 unfi = repo.unfiltered()
2328 unfi = repo.unfiltered()
2329 cl = unfi.changelog
2329 cl = unfi.changelog
2330 nm_data = nodemap.persisted_data(cl)
2330 nm_data = nodemap.persisted_data(cl)
2331 if nm_data is not None:
2331 if nm_data is not None:
2332 docket, data = nm_data
2332 docket, data = nm_data
2333 ui.write((b"uid: %s\n") % docket.uid)
2333 ui.write((b"uid: %s\n") % docket.uid)
2334 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2334 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2335 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2335 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2336 ui.write((b"data-length: %d\n") % docket.data_length)
2336 ui.write((b"data-length: %d\n") % docket.data_length)
2337 ui.write((b"data-unused: %d\n") % docket.data_unused)
2337 ui.write((b"data-unused: %d\n") % docket.data_unused)
2338 unused_perc = docket.data_unused * 100.0 / docket.data_length
2338 unused_perc = docket.data_unused * 100.0 / docket.data_length
2339 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2339 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2340
2340
2341
2341
2342 @command(
2342 @command(
2343 b'debugobsolete',
2343 b'debugobsolete',
2344 [
2344 [
2345 (b'', b'flags', 0, _(b'markers flag')),
2345 (b'', b'flags', 0, _(b'markers flag')),
2346 (
2346 (
2347 b'',
2347 b'',
2348 b'record-parents',
2348 b'record-parents',
2349 False,
2349 False,
2350 _(b'record parent information for the precursor'),
2350 _(b'record parent information for the precursor'),
2351 ),
2351 ),
2352 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2352 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2353 (
2353 (
2354 b'',
2354 b'',
2355 b'exclusive',
2355 b'exclusive',
2356 False,
2356 False,
2357 _(b'restrict display to markers only relevant to REV'),
2357 _(b'restrict display to markers only relevant to REV'),
2358 ),
2358 ),
2359 (b'', b'index', False, _(b'display index of the marker')),
2359 (b'', b'index', False, _(b'display index of the marker')),
2360 (b'', b'delete', [], _(b'delete markers specified by indices')),
2360 (b'', b'delete', [], _(b'delete markers specified by indices')),
2361 ]
2361 ]
2362 + cmdutil.commitopts2
2362 + cmdutil.commitopts2
2363 + cmdutil.formatteropts,
2363 + cmdutil.formatteropts,
2364 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2364 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2365 )
2365 )
2366 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2366 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2367 """create arbitrary obsolete marker
2367 """create arbitrary obsolete marker
2368
2368
2369 With no arguments, displays the list of obsolescence markers."""
2369 With no arguments, displays the list of obsolescence markers."""
2370
2370
2371 opts = pycompat.byteskwargs(opts)
2371 opts = pycompat.byteskwargs(opts)
2372
2372
2373 def parsenodeid(s):
2373 def parsenodeid(s):
2374 try:
2374 try:
2375 # We do not use revsingle/revrange functions here to accept
2375 # We do not use revsingle/revrange functions here to accept
2376 # arbitrary node identifiers, possibly not present in the
2376 # arbitrary node identifiers, possibly not present in the
2377 # local repository.
2377 # local repository.
2378 n = bin(s)
2378 n = bin(s)
2379 if len(n) != len(nullid):
2379 if len(n) != len(nullid):
2380 raise TypeError()
2380 raise TypeError()
2381 return n
2381 return n
2382 except TypeError:
2382 except TypeError:
2383 raise error.InputError(
2383 raise error.InputError(
2384 b'changeset references must be full hexadecimal '
2384 b'changeset references must be full hexadecimal '
2385 b'node identifiers'
2385 b'node identifiers'
2386 )
2386 )
2387
2387
2388 if opts.get(b'delete'):
2388 if opts.get(b'delete'):
2389 indices = []
2389 indices = []
2390 for v in opts.get(b'delete'):
2390 for v in opts.get(b'delete'):
2391 try:
2391 try:
2392 indices.append(int(v))
2392 indices.append(int(v))
2393 except ValueError:
2393 except ValueError:
2394 raise error.InputError(
2394 raise error.InputError(
2395 _(b'invalid index value: %r') % v,
2395 _(b'invalid index value: %r') % v,
2396 hint=_(b'use integers for indices'),
2396 hint=_(b'use integers for indices'),
2397 )
2397 )
2398
2398
2399 if repo.currenttransaction():
2399 if repo.currenttransaction():
2400 raise error.Abort(
2400 raise error.Abort(
2401 _(b'cannot delete obsmarkers in the middle of a transaction.')
2401 _(b'cannot delete obsmarkers in the middle of a transaction.')
2402 )
2402 )
2403
2403
2404 with repo.lock():
2404 with repo.lock():
2405 n = repair.deleteobsmarkers(repo.obsstore, indices)
2405 n = repair.deleteobsmarkers(repo.obsstore, indices)
2406 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2406 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2407
2407
2408 return
2408 return
2409
2409
2410 if precursor is not None:
2410 if precursor is not None:
2411 if opts[b'rev']:
2411 if opts[b'rev']:
2412 raise error.InputError(
2412 raise error.InputError(
2413 b'cannot select revision when creating marker'
2413 b'cannot select revision when creating marker'
2414 )
2414 )
2415 metadata = {}
2415 metadata = {}
2416 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2416 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2417 succs = tuple(parsenodeid(succ) for succ in successors)
2417 succs = tuple(parsenodeid(succ) for succ in successors)
2418 l = repo.lock()
2418 l = repo.lock()
2419 try:
2419 try:
2420 tr = repo.transaction(b'debugobsolete')
2420 tr = repo.transaction(b'debugobsolete')
2421 try:
2421 try:
2422 date = opts.get(b'date')
2422 date = opts.get(b'date')
2423 if date:
2423 if date:
2424 date = dateutil.parsedate(date)
2424 date = dateutil.parsedate(date)
2425 else:
2425 else:
2426 date = None
2426 date = None
2427 prec = parsenodeid(precursor)
2427 prec = parsenodeid(precursor)
2428 parents = None
2428 parents = None
2429 if opts[b'record_parents']:
2429 if opts[b'record_parents']:
2430 if prec not in repo.unfiltered():
2430 if prec not in repo.unfiltered():
2431 raise error.Abort(
2431 raise error.Abort(
2432 b'cannot use --record-parents on '
2432 b'cannot use --record-parents on '
2433 b'unknown changesets'
2433 b'unknown changesets'
2434 )
2434 )
2435 parents = repo.unfiltered()[prec].parents()
2435 parents = repo.unfiltered()[prec].parents()
2436 parents = tuple(p.node() for p in parents)
2436 parents = tuple(p.node() for p in parents)
2437 repo.obsstore.create(
2437 repo.obsstore.create(
2438 tr,
2438 tr,
2439 prec,
2439 prec,
2440 succs,
2440 succs,
2441 opts[b'flags'],
2441 opts[b'flags'],
2442 parents=parents,
2442 parents=parents,
2443 date=date,
2443 date=date,
2444 metadata=metadata,
2444 metadata=metadata,
2445 ui=ui,
2445 ui=ui,
2446 )
2446 )
2447 tr.close()
2447 tr.close()
2448 except ValueError as exc:
2448 except ValueError as exc:
2449 raise error.Abort(
2449 raise error.Abort(
2450 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2450 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2451 )
2451 )
2452 finally:
2452 finally:
2453 tr.release()
2453 tr.release()
2454 finally:
2454 finally:
2455 l.release()
2455 l.release()
2456 else:
2456 else:
2457 if opts[b'rev']:
2457 if opts[b'rev']:
2458 revs = scmutil.revrange(repo, opts[b'rev'])
2458 revs = scmutil.revrange(repo, opts[b'rev'])
2459 nodes = [repo[r].node() for r in revs]
2459 nodes = [repo[r].node() for r in revs]
2460 markers = list(
2460 markers = list(
2461 obsutil.getmarkers(
2461 obsutil.getmarkers(
2462 repo, nodes=nodes, exclusive=opts[b'exclusive']
2462 repo, nodes=nodes, exclusive=opts[b'exclusive']
2463 )
2463 )
2464 )
2464 )
2465 markers.sort(key=lambda x: x._data)
2465 markers.sort(key=lambda x: x._data)
2466 else:
2466 else:
2467 markers = obsutil.getmarkers(repo)
2467 markers = obsutil.getmarkers(repo)
2468
2468
2469 markerstoiter = markers
2469 markerstoiter = markers
2470 isrelevant = lambda m: True
2470 isrelevant = lambda m: True
2471 if opts.get(b'rev') and opts.get(b'index'):
2471 if opts.get(b'rev') and opts.get(b'index'):
2472 markerstoiter = obsutil.getmarkers(repo)
2472 markerstoiter = obsutil.getmarkers(repo)
2473 markerset = set(markers)
2473 markerset = set(markers)
2474 isrelevant = lambda m: m in markerset
2474 isrelevant = lambda m: m in markerset
2475
2475
2476 fm = ui.formatter(b'debugobsolete', opts)
2476 fm = ui.formatter(b'debugobsolete', opts)
2477 for i, m in enumerate(markerstoiter):
2477 for i, m in enumerate(markerstoiter):
2478 if not isrelevant(m):
2478 if not isrelevant(m):
2479 # marker can be irrelevant when we're iterating over a set
2479 # marker can be irrelevant when we're iterating over a set
2480 # of markers (markerstoiter) which is bigger than the set
2480 # of markers (markerstoiter) which is bigger than the set
2481 # of markers we want to display (markers)
2481 # of markers we want to display (markers)
2482 # this can happen if both --index and --rev options are
2482 # this can happen if both --index and --rev options are
2483 # provided and thus we need to iterate over all of the markers
2483 # provided and thus we need to iterate over all of the markers
2484 # to get the correct indices, but only display the ones that
2484 # to get the correct indices, but only display the ones that
2485 # are relevant to --rev value
2485 # are relevant to --rev value
2486 continue
2486 continue
2487 fm.startitem()
2487 fm.startitem()
2488 ind = i if opts.get(b'index') else None
2488 ind = i if opts.get(b'index') else None
2489 cmdutil.showmarker(fm, m, index=ind)
2489 cmdutil.showmarker(fm, m, index=ind)
2490 fm.end()
2490 fm.end()
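# Editor's note -- illustrative only. With no arguments the command lists the
# markers; with node arguments it records one, e.g. (placeholder node ids):
#
#   hg debugobsolete <precursor-node> <successor-node> -d '0 0'
#
# -d/--date and -u/--user come from the commit options added to the command
# table above.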
2491
2491
2492
2492
2493 @command(
2493 @command(
2494 b'debugp1copies',
2494 b'debugp1copies',
2495 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2495 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2496 _(b'[-r REV]'),
2496 _(b'[-r REV]'),
2497 )
2497 )
2498 def debugp1copies(ui, repo, **opts):
2498 def debugp1copies(ui, repo, **opts):
2499 """dump copy information compared to p1"""
2499 """dump copy information compared to p1"""
2500
2500
2501 opts = pycompat.byteskwargs(opts)
2501 opts = pycompat.byteskwargs(opts)
2502 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2502 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2503 for dst, src in ctx.p1copies().items():
2503 for dst, src in ctx.p1copies().items():
2504 ui.write(b'%s -> %s\n' % (src, dst))
2504 ui.write(b'%s -> %s\n' % (src, dst))
2505
2505
2506
2506
2507 @command(
2507 @command(
2508 b'debugp2copies',
2508 b'debugp2copies',
2509 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2509 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2510 _(b'[-r REV]'),
2510 _(b'[-r REV]'),
2511 )
2511 )
2512 def debugp2copies(ui, repo, **opts):
2512 def debugp2copies(ui, repo, **opts):
2513 """dump copy information compared to p2"""
2513 """dump copy information compared to p2"""
2514
2514
2515 opts = pycompat.byteskwargs(opts)
2515 opts = pycompat.byteskwargs(opts)
2516 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2516 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2517 for dst, src in ctx.p2copies().items():
2517 for dst, src in ctx.p2copies().items():
2518 ui.write(b'%s -> %s\n' % (src, dst))
2518 ui.write(b'%s -> %s\n' % (src, dst))
2519
2519
2520
2520
2521 @command(
2521 @command(
2522 b'debugpathcomplete',
2522 b'debugpathcomplete',
2523 [
2523 [
2524 (b'f', b'full', None, _(b'complete an entire path')),
2524 (b'f', b'full', None, _(b'complete an entire path')),
2525 (b'n', b'normal', None, _(b'show only normal files')),
2525 (b'n', b'normal', None, _(b'show only normal files')),
2526 (b'a', b'added', None, _(b'show only added files')),
2526 (b'a', b'added', None, _(b'show only added files')),
2527 (b'r', b'removed', None, _(b'show only removed files')),
2527 (b'r', b'removed', None, _(b'show only removed files')),
2528 ],
2528 ],
2529 _(b'FILESPEC...'),
2529 _(b'FILESPEC...'),
2530 )
2530 )
2531 def debugpathcomplete(ui, repo, *specs, **opts):
2531 def debugpathcomplete(ui, repo, *specs, **opts):
2532 """complete part or all of a tracked path
2532 """complete part or all of a tracked path
2533
2533
2534 This command supports shells that offer path name completion. It
2534 This command supports shells that offer path name completion. It
2535 currently completes only files already known to the dirstate.
2535 currently completes only files already known to the dirstate.
2536
2536
2537 Completion extends only to the next path segment unless
2537 Completion extends only to the next path segment unless
2538 --full is specified, in which case entire paths are used."""
2538 --full is specified, in which case entire paths are used."""
2539
2539
2540 def complete(path, acceptable):
2540 def complete(path, acceptable):
2541 dirstate = repo.dirstate
2541 dirstate = repo.dirstate
2542 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2542 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2543 rootdir = repo.root + pycompat.ossep
2543 rootdir = repo.root + pycompat.ossep
2544 if spec != repo.root and not spec.startswith(rootdir):
2544 if spec != repo.root and not spec.startswith(rootdir):
2545 return [], []
2545 return [], []
2546 if os.path.isdir(spec):
2546 if os.path.isdir(spec):
2547 spec += b'/'
2547 spec += b'/'
2548 spec = spec[len(rootdir) :]
2548 spec = spec[len(rootdir) :]
2549 fixpaths = pycompat.ossep != b'/'
2549 fixpaths = pycompat.ossep != b'/'
2550 if fixpaths:
2550 if fixpaths:
2551 spec = spec.replace(pycompat.ossep, b'/')
2551 spec = spec.replace(pycompat.ossep, b'/')
2552 speclen = len(spec)
2552 speclen = len(spec)
2553 fullpaths = opts['full']
2553 fullpaths = opts['full']
2554 files, dirs = set(), set()
2554 files, dirs = set(), set()
2555 adddir, addfile = dirs.add, files.add
2555 adddir, addfile = dirs.add, files.add
2556 for f, st in pycompat.iteritems(dirstate):
2556 for f, st in pycompat.iteritems(dirstate):
2557 if f.startswith(spec) and st[0] in acceptable:
2557 if f.startswith(spec) and st[0] in acceptable:
2558 if fixpaths:
2558 if fixpaths:
2559 f = f.replace(b'/', pycompat.ossep)
2559 f = f.replace(b'/', pycompat.ossep)
2560 if fullpaths:
2560 if fullpaths:
2561 addfile(f)
2561 addfile(f)
2562 continue
2562 continue
2563 s = f.find(pycompat.ossep, speclen)
2563 s = f.find(pycompat.ossep, speclen)
2564 if s >= 0:
2564 if s >= 0:
2565 adddir(f[:s])
2565 adddir(f[:s])
2566 else:
2566 else:
2567 addfile(f)
2567 addfile(f)
2568 return files, dirs
2568 return files, dirs
2569
2569
2570 acceptable = b''
2570 acceptable = b''
2571 if opts['normal']:
2571 if opts['normal']:
2572 acceptable += b'nm'
2572 acceptable += b'nm'
2573 if opts['added']:
2573 if opts['added']:
2574 acceptable += b'a'
2574 acceptable += b'a'
2575 if opts['removed']:
2575 if opts['removed']:
2576 acceptable += b'r'
2576 acceptable += b'r'
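# editor's note: the dirstate state letters used here are 'n' (normal),
# 'm' (merged), 'a' (added) and 'r' (removed)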
2577 cwd = repo.getcwd()
2577 cwd = repo.getcwd()
2578 if not specs:
2578 if not specs:
2579 specs = [b'.']
2579 specs = [b'.']
2580
2580
2581 files, dirs = set(), set()
2581 files, dirs = set(), set()
2582 for spec in specs:
2582 for spec in specs:
2583 f, d = complete(spec, acceptable or b'nmar')
2583 f, d = complete(spec, acceptable or b'nmar')
2584 files.update(f)
2584 files.update(f)
2585 dirs.update(d)
2585 dirs.update(d)
2586 files.update(dirs)
2586 files.update(dirs)
2587 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2587 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2588 ui.write(b'\n')
2588 ui.write(b'\n')
2589
2589
2590
2590
2591 @command(
2591 @command(
2592 b'debugpathcopies',
2592 b'debugpathcopies',
2593 cmdutil.walkopts,
2593 cmdutil.walkopts,
2594 b'hg debugpathcopies REV1 REV2 [FILE]',
2594 b'hg debugpathcopies REV1 REV2 [FILE]',
2595 inferrepo=True,
2595 inferrepo=True,
2596 )
2596 )
2597 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2597 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2598 """show copies between two revisions"""
2598 """show copies between two revisions"""
2599 ctx1 = scmutil.revsingle(repo, rev1)
2599 ctx1 = scmutil.revsingle(repo, rev1)
2600 ctx2 = scmutil.revsingle(repo, rev2)
2600 ctx2 = scmutil.revsingle(repo, rev2)
2601 m = scmutil.match(ctx1, pats, opts)
2601 m = scmutil.match(ctx1, pats, opts)
2602 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2602 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2603 ui.write(b'%s -> %s\n' % (src, dst))
2603 ui.write(b'%s -> %s\n' % (src, dst))
2604
2604
2605
2605
2606 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2606 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2607 def debugpeer(ui, path):
2607 def debugpeer(ui, path):
2608 """establish a connection to a peer repository"""
2608 """establish a connection to a peer repository"""
2609 # Always enable peer request logging. Requires --debug to display
2609 # Always enable peer request logging. Requires --debug to display
2610 # though.
2610 # though.
2611 overrides = {
2611 overrides = {
2612 (b'devel', b'debug.peer-request'): True,
2612 (b'devel', b'debug.peer-request'): True,
2613 }
2613 }
2614
2614
2615 with ui.configoverride(overrides):
2615 with ui.configoverride(overrides):
2616 peer = hg.peer(ui, {}, path)
2616 peer = hg.peer(ui, {}, path)
2617
2617
2618 local = peer.local() is not None
2618 local = peer.local() is not None
2619 canpush = peer.canpush()
2619 canpush = peer.canpush()
2620
2620
2621 ui.write(_(b'url: %s\n') % peer.url())
2621 ui.write(_(b'url: %s\n') % peer.url())
2622 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2622 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2623 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2623 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2624
2624
2625
2625
2626 @command(
2626 @command(
2627 b'debugpickmergetool',
2627 b'debugpickmergetool',
2628 [
2628 [
2629 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2629 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2630 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2630 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2631 ]
2631 ]
2632 + cmdutil.walkopts
2632 + cmdutil.walkopts
2633 + cmdutil.mergetoolopts,
2633 + cmdutil.mergetoolopts,
2634 _(b'[PATTERN]...'),
2634 _(b'[PATTERN]...'),
2635 inferrepo=True,
2635 inferrepo=True,
2636 )
2636 )
2637 def debugpickmergetool(ui, repo, *pats, **opts):
2637 def debugpickmergetool(ui, repo, *pats, **opts):
2638 """examine which merge tool is chosen for specified file
2638 """examine which merge tool is chosen for specified file
2639
2639
2640 As described in :hg:`help merge-tools`, Mercurial examines
2640 As described in :hg:`help merge-tools`, Mercurial examines
2641 configurations below in this order to decide which merge tool is
2641 configurations below in this order to decide which merge tool is
2642 chosen for specified file.
2642 chosen for specified file.
2643
2643
2644 1. ``--tool`` option
2644 1. ``--tool`` option
2645 2. ``HGMERGE`` environment variable
2645 2. ``HGMERGE`` environment variable
2646 3. configurations in ``merge-patterns`` section
2646 3. configurations in ``merge-patterns`` section
2647 4. configuration of ``ui.merge``
2647 4. configuration of ``ui.merge``
2648 5. configurations in ``merge-tools`` section
2648 5. configurations in ``merge-tools`` section
2649 6. ``hgmerge`` tool (for historical reasons only)
2649 6. ``hgmerge`` tool (for historical reasons only)
2650 7. default tool for fallback (``:merge`` or ``:prompt``)
2650 7. default tool for fallback (``:merge`` or ``:prompt``)
2651
2651
2652 This command writes out examination result in the style below::
2652 This command writes out examination result in the style below::
2653
2653
2654 FILE = MERGETOOL
2654 FILE = MERGETOOL
2655
2655
2656 By default, all files known in the first parent context of the
2656 By default, all files known in the first parent context of the
2657 working directory are examined. Use file patterns and/or -I/-X
2657 working directory are examined. Use file patterns and/or -I/-X
2658 options to limit target files. -r/--rev is also useful to examine
2658 options to limit target files. -r/--rev is also useful to examine
2659 files in another context without actually updating to it.
2659 files in another context without actually updating to it.
2660
2660
2661 With --debug, this command shows warning messages while matching
2661 With --debug, this command shows warning messages while matching
2662 against ``merge-patterns`` and so on, too. It is recommended to
2662 against ``merge-patterns`` and so on, too. It is recommended to
2663 use this option with explicit file patterns and/or -I/-X options,
2663 use this option with explicit file patterns and/or -I/-X options,
2664 because this option increases the amount of output per file according
2664 because this option increases the amount of output per file according
2665 to configurations in hgrc.
2665 to configurations in hgrc.
2666
2666
2667 With -v/--verbose, this command shows configurations below at
2667 With -v/--verbose, this command shows configurations below at
2668 first (only if specified).
2668 first (only if specified).
2669
2669
2670 - ``--tool`` option
2670 - ``--tool`` option
2671 - ``HGMERGE`` environment variable
2671 - ``HGMERGE`` environment variable
2672 - configuration of ``ui.merge``
2672 - configuration of ``ui.merge``
2673
2673
2674 If merge tool is chosen before matching against
2674 If merge tool is chosen before matching against
2675 ``merge-patterns``, this command can't show any helpful
2675 ``merge-patterns``, this command can't show any helpful
2676 information, even with --debug. In such a case, the information above is
2676 information, even with --debug. In such a case, the information above is
2677 useful to know why a merge tool is chosen.
2677 useful to know why a merge tool is chosen.
2678 """
2678 """
2679 opts = pycompat.byteskwargs(opts)
2679 opts = pycompat.byteskwargs(opts)
2680 overrides = {}
2680 overrides = {}
2681 if opts[b'tool']:
2681 if opts[b'tool']:
2682 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2682 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2683 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2683 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2684
2684
2685 with ui.configoverride(overrides, b'debugmergepatterns'):
2685 with ui.configoverride(overrides, b'debugmergepatterns'):
2686 hgmerge = encoding.environ.get(b"HGMERGE")
2686 hgmerge = encoding.environ.get(b"HGMERGE")
2687 if hgmerge is not None:
2687 if hgmerge is not None:
2688 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2688 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2689 uimerge = ui.config(b"ui", b"merge")
2689 uimerge = ui.config(b"ui", b"merge")
2690 if uimerge:
2690 if uimerge:
2691 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2691 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2692
2692
2693 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2693 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2694 m = scmutil.match(ctx, pats, opts)
2694 m = scmutil.match(ctx, pats, opts)
2695 changedelete = opts[b'changedelete']
2695 changedelete = opts[b'changedelete']
2696 for path in ctx.walk(m):
2696 for path in ctx.walk(m):
2697 fctx = ctx[path]
2697 fctx = ctx[path]
2698 try:
2698 try:
2699 if not ui.debugflag:
2699 if not ui.debugflag:
2700 ui.pushbuffer(error=True)
2700 ui.pushbuffer(error=True)
2701 tool, toolpath = filemerge._picktool(
2701 tool, toolpath = filemerge._picktool(
2702 repo,
2702 repo,
2703 ui,
2703 ui,
2704 path,
2704 path,
2705 fctx.isbinary(),
2705 fctx.isbinary(),
2706 b'l' in fctx.flags(),
2706 b'l' in fctx.flags(),
2707 changedelete,
2707 changedelete,
2708 )
2708 )
2709 finally:
2709 finally:
2710 if not ui.debugflag:
2710 if not ui.debugflag:
2711 ui.popbuffer()
2711 ui.popbuffer()
2712 ui.write(b'%s = %s\n' % (path, tool))
2712 ui.write(b'%s = %s\n' % (path, tool))
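# Editor's sketch -- an example configuration exercising steps 3-5 of the
# resolution order described in the docstring above. The section and key
# names come from that docstring and :hg:`help merge-tools`; the tool name,
# path and pattern are hypothetical.
#
#   [merge-patterns]
#   **.txt = :merge
#
#   [ui]
#   merge = mymergetool
#
#   [merge-tools]
#   mymergetool.executable = /usr/local/bin/mymergetool
#
# With this in place, `hg debugpickmergetool file.txt` reports the tool picked
# by the pattern, while other files fall back to ui.merge.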
2713
2713
2714
2714
2715 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2715 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2716 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2716 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2717 """access the pushkey key/value protocol
2717 """access the pushkey key/value protocol
2718
2718
2719 With two args, list the keys in the given namespace.
2719 With two args, list the keys in the given namespace.
2720
2720
2721 With five args, set a key to new if it currently is set to old.
2721 With five args, set a key to new if it currently is set to old.
2722 Reports success or failure.
2722 Reports success or failure.
2723 """
2723 """
2724
2724
2725 target = hg.peer(ui, {}, repopath)
2725 target = hg.peer(ui, {}, repopath)
2726 if keyinfo:
2726 if keyinfo:
2727 key, old, new = keyinfo
2727 key, old, new = keyinfo
2728 with target.commandexecutor() as e:
2728 with target.commandexecutor() as e:
2729 r = e.callcommand(
2729 r = e.callcommand(
2730 b'pushkey',
2730 b'pushkey',
2731 {
2731 {
2732 b'namespace': namespace,
2732 b'namespace': namespace,
2733 b'key': key,
2733 b'key': key,
2734 b'old': old,
2734 b'old': old,
2735 b'new': new,
2735 b'new': new,
2736 },
2736 },
2737 ).result()
2737 ).result()
2738
2738
2739 ui.status(pycompat.bytestr(r) + b'\n')
2739 ui.status(pycompat.bytestr(r) + b'\n')
2740 return not r
2740 return not r
2741 else:
2741 else:
2742 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2742 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2743 ui.write(
2743 ui.write(
2744 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2744 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2745 )
2745 )
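# Editor's note -- the two calling conventions described in the docstring,
# with a placeholder URL; "bookmarks" is a standard pushkey namespace:
#
#   hg debugpushkey https://example.com/repo bookmarks
#   hg debugpushkey https://example.com/repo bookmarks mybook <old-node> <new-node>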
2746
2746
2747
2747
2748 @command(b'debugpvec', [], _(b'A B'))
2748 @command(b'debugpvec', [], _(b'A B'))
2749 def debugpvec(ui, repo, a, b=None):
2749 def debugpvec(ui, repo, a, b=None):
2750 ca = scmutil.revsingle(repo, a)
2750 ca = scmutil.revsingle(repo, a)
2751 cb = scmutil.revsingle(repo, b)
2751 cb = scmutil.revsingle(repo, b)
2752 pa = pvec.ctxpvec(ca)
2752 pa = pvec.ctxpvec(ca)
2753 pb = pvec.ctxpvec(cb)
2753 pb = pvec.ctxpvec(cb)
2754 if pa == pb:
2754 if pa == pb:
2755 rel = b"="
2755 rel = b"="
2756 elif pa > pb:
2756 elif pa > pb:
2757 rel = b">"
2757 rel = b">"
2758 elif pa < pb:
2758 elif pa < pb:
2759 rel = b"<"
2759 rel = b"<"
2760 elif pa | pb:
2760 elif pa | pb:
2761 rel = b"|"
2761 rel = b"|"
2762 ui.write(_(b"a: %s\n") % pa)
2762 ui.write(_(b"a: %s\n") % pa)
2763 ui.write(_(b"b: %s\n") % pb)
2763 ui.write(_(b"b: %s\n") % pb)
2764 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2764 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2765 ui.write(
2765 ui.write(
2766 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2766 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2767 % (
2767 % (
2768 abs(pa._depth - pb._depth),
2768 abs(pa._depth - pb._depth),
2769 pvec._hamming(pa._vec, pb._vec),
2769 pvec._hamming(pa._vec, pb._vec),
2770 pa.distance(pb),
2770 pa.distance(pb),
2771 rel,
2771 rel,
2772 )
2772 )
2773 )
2773 )
2774
2774
2775
2775
2776 @command(
2776 @command(
2777 b'debugrebuilddirstate|debugrebuildstate',
2777 b'debugrebuilddirstate|debugrebuildstate',
2778 [
2778 [
2779 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2779 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2780 (
2780 (
2781 b'',
2781 b'',
2782 b'minimal',
2782 b'minimal',
2783 None,
2783 None,
2784 _(
2784 _(
2785 b'only rebuild files that are inconsistent with '
2785 b'only rebuild files that are inconsistent with '
2786 b'the working copy parent'
2786 b'the working copy parent'
2787 ),
2787 ),
2788 ),
2788 ),
2789 ],
2789 ],
2790 _(b'[-r REV]'),
2790 _(b'[-r REV]'),
2791 )
2791 )
2792 def debugrebuilddirstate(ui, repo, rev, **opts):
2792 def debugrebuilddirstate(ui, repo, rev, **opts):
2793 """rebuild the dirstate as it would look like for the given revision
2793 """rebuild the dirstate as it would look like for the given revision
2794
2794
2795 If no revision is specified the first current parent will be used.
2795 If no revision is specified the first current parent will be used.
2796
2796
2797 The dirstate will be set to the files of the given revision.
2797 The dirstate will be set to the files of the given revision.
2798 The actual working directory content or existing dirstate
2798 The actual working directory content or existing dirstate
2799 information such as adds or removes is not considered.
2799 information such as adds or removes is not considered.
2800
2800
2801 ``minimal`` will only rebuild the dirstate status for files that claim to be
2801 ``minimal`` will only rebuild the dirstate status for files that claim to be
2802 tracked but are not in the parent manifest, or that exist in the parent
2802 tracked but are not in the parent manifest, or that exist in the parent
2803 manifest but are not in the dirstate. It will not change adds, removes, or
2803 manifest but are not in the dirstate. It will not change adds, removes, or
2804 modified files that are in the working copy parent.
2804 modified files that are in the working copy parent.
2805
2805
2806 One use of this command is to make the next :hg:`status` invocation
2806 One use of this command is to make the next :hg:`status` invocation
2807 check the actual file content.
2807 check the actual file content.
2808 """
2808 """
2809 ctx = scmutil.revsingle(repo, rev)
2809 ctx = scmutil.revsingle(repo, rev)
2810 with repo.wlock():
2810 with repo.wlock():
2811 dirstate = repo.dirstate
2811 dirstate = repo.dirstate
2812 changedfiles = None
2812 changedfiles = None
2813 # See command doc for what minimal does.
2813 # See command doc for what minimal does.
2814 if opts.get('minimal'):
2814 if opts.get('minimal'):
2815 manifestfiles = set(ctx.manifest().keys())
2815 manifestfiles = set(ctx.manifest().keys())
2816 dirstatefiles = set(dirstate)
2816 dirstatefiles = set(dirstate)
2817 manifestonly = manifestfiles - dirstatefiles
2817 manifestonly = manifestfiles - dirstatefiles
2818 dsonly = dirstatefiles - manifestfiles
2818 dsonly = dirstatefiles - manifestfiles
2819 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2819 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2820 changedfiles = manifestonly | dsnotadded
2820 changedfiles = manifestonly | dsnotadded
2821
2821
2822 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2822 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2823
2823
2824
2824
2825 @command(b'debugrebuildfncache', [], b'')
2825 @command(b'debugrebuildfncache', [], b'')
2826 def debugrebuildfncache(ui, repo):
2826 def debugrebuildfncache(ui, repo):
2827 """rebuild the fncache file"""
2827 """rebuild the fncache file"""
2828 repair.rebuildfncache(ui, repo)
2828 repair.rebuildfncache(ui, repo)
2829
2829
2830
2830
2831 @command(
2831 @command(
2832 b'debugrename',
2832 b'debugrename',
2833 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2833 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2834 _(b'[-r REV] [FILE]...'),
2834 _(b'[-r REV] [FILE]...'),
2835 )
2835 )
2836 def debugrename(ui, repo, *pats, **opts):
2836 def debugrename(ui, repo, *pats, **opts):
2837 """dump rename information"""
2837 """dump rename information"""
2838
2838
2839 opts = pycompat.byteskwargs(opts)
2839 opts = pycompat.byteskwargs(opts)
2840 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2840 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2841 m = scmutil.match(ctx, pats, opts)
2841 m = scmutil.match(ctx, pats, opts)
2842 for abs in ctx.walk(m):
2842 for abs in ctx.walk(m):
2843 fctx = ctx[abs]
2843 fctx = ctx[abs]
2844 o = fctx.filelog().renamed(fctx.filenode())
2844 o = fctx.filelog().renamed(fctx.filenode())
2845 rel = repo.pathto(abs)
2845 rel = repo.pathto(abs)
2846 if o:
2846 if o:
2847 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2847 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2848 else:
2848 else:
2849 ui.write(_(b"%s not renamed\n") % rel)
2849 ui.write(_(b"%s not renamed\n") % rel)
2850
2850
2851
2851
2852 @command(b'debugrequires|debugrequirements', [], b'')
2852 @command(b'debugrequires|debugrequirements', [], b'')
2853 def debugrequirements(ui, repo):
2853 def debugrequirements(ui, repo):
2854 """ print the current repo requirements """
2854 """ print the current repo requirements """
2855 for r in sorted(repo.requirements):
2855 for r in sorted(repo.requirements):
2856 ui.write(b"%s\n" % r)
2856 ui.write(b"%s\n" % r)
2857
2857
2858
2858
2859 @command(
2859 @command(
2860 b'debugrevlog',
2860 b'debugrevlog',
2861 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2861 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2862 _(b'-c|-m|FILE'),
2862 _(b'-c|-m|FILE'),
2863 optionalrepo=True,
2863 optionalrepo=True,
2864 )
2864 )
2865 def debugrevlog(ui, repo, file_=None, **opts):
2865 def debugrevlog(ui, repo, file_=None, **opts):
2866 """show data and statistics about a revlog"""
2866 """show data and statistics about a revlog"""
2867 opts = pycompat.byteskwargs(opts)
2867 opts = pycompat.byteskwargs(opts)
2868 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2868 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2869
2869
2870 if opts.get(b"dump"):
2870 if opts.get(b"dump"):
2871 numrevs = len(r)
2871 numrevs = len(r)
2872 ui.write(
2872 ui.write(
2873 (
2873 (
2874 b"# rev p1rev p2rev start end deltastart base p1 p2"
2874 b"# rev p1rev p2rev start end deltastart base p1 p2"
2875 b" rawsize totalsize compression heads chainlen\n"
2875 b" rawsize totalsize compression heads chainlen\n"
2876 )
2876 )
2877 )
2877 )
2878 ts = 0
2878 ts = 0
2879 heads = set()
2879 heads = set()
2880
2880
2881 for rev in pycompat.xrange(numrevs):
2881 for rev in pycompat.xrange(numrevs):
2882 dbase = r.deltaparent(rev)
2882 dbase = r.deltaparent(rev)
2883 if dbase == -1:
2883 if dbase == -1:
2884 dbase = rev
2884 dbase = rev
2885 cbase = r.chainbase(rev)
2885 cbase = r.chainbase(rev)
2886 clen = r.chainlen(rev)
2886 clen = r.chainlen(rev)
2887 p1, p2 = r.parentrevs(rev)
2887 p1, p2 = r.parentrevs(rev)
2888 rs = r.rawsize(rev)
2888 rs = r.rawsize(rev)
2889 ts = ts + rs
2889 ts = ts + rs
2890 heads -= set(r.parentrevs(rev))
2890 heads -= set(r.parentrevs(rev))
2891 heads.add(rev)
2891 heads.add(rev)
2892 try:
2892 try:
2893 compression = ts / r.end(rev)
2893 compression = ts / r.end(rev)
2894 except ZeroDivisionError:
2894 except ZeroDivisionError:
2895 compression = 0
2895 compression = 0
2896 ui.write(
2896 ui.write(
2897 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2897 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2898 b"%11d %5d %8d\n"
2898 b"%11d %5d %8d\n"
2899 % (
2899 % (
2900 rev,
2900 rev,
2901 p1,
2901 p1,
2902 p2,
2902 p2,
2903 r.start(rev),
2903 r.start(rev),
2904 r.end(rev),
2904 r.end(rev),
2905 r.start(dbase),
2905 r.start(dbase),
2906 r.start(cbase),
2906 r.start(cbase),
2907 r.start(p1),
2907 r.start(p1),
2908 r.start(p2),
2908 r.start(p2),
2909 rs,
2909 rs,
2910 ts,
2910 ts,
2911 compression,
2911 compression,
2912 len(heads),
2912 len(heads),
2913 clen,
2913 clen,
2914 )
2914 )
2915 )
2915 )
2916 return 0
2916 return 0
2917
2917
2918 v = r.version
2918 v = r.version
2919 format = v & 0xFFFF
2919 format = v & 0xFFFF
2920 flags = []
2920 flags = []
2921 gdelta = False
2921 gdelta = False
2922 if v & revlog.FLAG_INLINE_DATA:
2922 if v & revlog.FLAG_INLINE_DATA:
2923 flags.append(b'inline')
2923 flags.append(b'inline')
2924 if v & revlog.FLAG_GENERALDELTA:
2924 if v & revlog.FLAG_GENERALDELTA:
2925 gdelta = True
2925 gdelta = True
2926 flags.append(b'generaldelta')
2926 flags.append(b'generaldelta')
2927 if not flags:
2927 if not flags:
2928 flags = [b'(none)']
2928 flags = [b'(none)']
2929
2929
2930 ### tracks merge vs single parent
2930 ### tracks merge vs single parent
2931 nummerges = 0
2931 nummerges = 0
2932
2932
2933 ### tracks the ways the "delta" is built
2933 ### tracks the ways the "delta" is built
2934 # nodelta
2934 # nodelta
2935 numempty = 0
2935 numempty = 0
2936 numemptytext = 0
2936 numemptytext = 0
2937 numemptydelta = 0
2937 numemptydelta = 0
2938 # full file content
2938 # full file content
2939 numfull = 0
2939 numfull = 0
2940 # intermediate snapshot against a prior snapshot
2940 # intermediate snapshot against a prior snapshot
2941 numsemi = 0
2941 numsemi = 0
2942 # snapshot count per depth
2942 # snapshot count per depth
2943 numsnapdepth = collections.defaultdict(lambda: 0)
2943 numsnapdepth = collections.defaultdict(lambda: 0)
2944 # delta against previous revision
2944 # delta against previous revision
2945 numprev = 0
2945 numprev = 0
2946 # delta against first or second parent (not prev)
2946 # delta against first or second parent (not prev)
2947 nump1 = 0
2947 nump1 = 0
2948 nump2 = 0
2948 nump2 = 0
2949 # delta against neither prev nor parents
2949 # delta against neither prev nor parents
2950 numother = 0
2950 numother = 0
2951 # delta against prev that are also first or second parent
2951 # delta against prev that are also first or second parent
2952 # (details of `numprev`)
2952 # (details of `numprev`)
2953 nump1prev = 0
2953 nump1prev = 0
2954 nump2prev = 0
2954 nump2prev = 0
2955
2955
2956 # data about the delta chain of each rev
2956 # data about the delta chain of each rev
2957 chainlengths = []
2957 chainlengths = []
2958 chainbases = []
2958 chainbases = []
2959 chainspans = []
2959 chainspans = []
2960
2960
2961 # data about each revision
2961 # data about each revision
2962 datasize = [None, 0, 0]
2962 datasize = [None, 0, 0]
2963 fullsize = [None, 0, 0]
2963 fullsize = [None, 0, 0]
2964 semisize = [None, 0, 0]
2964 semisize = [None, 0, 0]
2965 # snapshot count per depth
2965 # snapshot count per depth
2966 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2966 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2967 deltasize = [None, 0, 0]
2967 deltasize = [None, 0, 0]
2968 chunktypecounts = {}
2968 chunktypecounts = {}
2969 chunktypesizes = {}
2969 chunktypesizes = {}
2970
2970
2971 def addsize(size, l):
2971 def addsize(size, l):
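# `l` is a running [min, max, total] accumulator for one size category
# (datasize, fullsize, semisize, deltasize, or a per-depth snapshot bucket).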
2972 if l[0] is None or size < l[0]:
2972 if l[0] is None or size < l[0]:
2973 l[0] = size
2973 l[0] = size
2974 if size > l[1]:
2974 if size > l[1]:
2975 l[1] = size
2975 l[1] = size
2976 l[2] += size
2976 l[2] += size
2977
2977
2978 numrevs = len(r)
2978 numrevs = len(r)
2979 for rev in pycompat.xrange(numrevs):
2979 for rev in pycompat.xrange(numrevs):
2980 p1, p2 = r.parentrevs(rev)
2980 p1, p2 = r.parentrevs(rev)
2981 delta = r.deltaparent(rev)
2981 delta = r.deltaparent(rev)
2982 if format > 0:
2982 if format > 0:
2983 addsize(r.rawsize(rev), datasize)
2983 addsize(r.rawsize(rev), datasize)
2984 if p2 != nullrev:
2984 if p2 != nullrev:
2985 nummerges += 1
2985 nummerges += 1
2986 size = r.length(rev)
2986 size = r.length(rev)
2987 if delta == nullrev:
2987 if delta == nullrev:
2988 chainlengths.append(0)
2988 chainlengths.append(0)
2989 chainbases.append(r.start(rev))
2989 chainbases.append(r.start(rev))
2990 chainspans.append(size)
2990 chainspans.append(size)
2991 if size == 0:
2991 if size == 0:
2992 numempty += 1
2992 numempty += 1
2993 numemptytext += 1
2993 numemptytext += 1
2994 else:
2994 else:
2995 numfull += 1
2995 numfull += 1
2996 numsnapdepth[0] += 1
2996 numsnapdepth[0] += 1
2997 addsize(size, fullsize)
2997 addsize(size, fullsize)
2998 addsize(size, snapsizedepth[0])
2998 addsize(size, snapsizedepth[0])
2999 else:
2999 else:
3000 chainlengths.append(chainlengths[delta] + 1)
3000 chainlengths.append(chainlengths[delta] + 1)
3001 baseaddr = chainbases[delta]
3001 baseaddr = chainbases[delta]
3002 revaddr = r.start(rev)
3002 revaddr = r.start(rev)
3003 chainbases.append(baseaddr)
3003 chainbases.append(baseaddr)
3004 chainspans.append((revaddr - baseaddr) + size)
3004 chainspans.append((revaddr - baseaddr) + size)
3005 if size == 0:
3005 if size == 0:
3006 numempty += 1
3006 numempty += 1
3007 numemptydelta += 1
3007 numemptydelta += 1
3008 elif r.issnapshot(rev):
3008 elif r.issnapshot(rev):
3009 addsize(size, semisize)
3009 addsize(size, semisize)
3010 numsemi += 1
3010 numsemi += 1
3011 depth = r.snapshotdepth(rev)
3011 depth = r.snapshotdepth(rev)
3012 numsnapdepth[depth] += 1
3012 numsnapdepth[depth] += 1
3013 addsize(size, snapsizedepth[depth])
3013 addsize(size, snapsizedepth[depth])
3014 else:
3014 else:
3015 addsize(size, deltasize)
3015 addsize(size, deltasize)
3016 if delta == rev - 1:
3016 if delta == rev - 1:
3017 numprev += 1
3017 numprev += 1
3018 if delta == p1:
3018 if delta == p1:
3019 nump1prev += 1
3019 nump1prev += 1
3020 elif delta == p2:
3020 elif delta == p2:
3021 nump2prev += 1
3021 nump2prev += 1
3022 elif delta == p1:
3022 elif delta == p1:
3023 nump1 += 1
3023 nump1 += 1
3024 elif delta == p2:
3024 elif delta == p2:
3025 nump2 += 1
3025 nump2 += 1
3026 elif delta != nullrev:
3026 elif delta != nullrev:
3027 numother += 1
3027 numother += 1
3028
3028
3029 # Obtain data on the raw chunks in the revlog.
3029 # Obtain data on the raw chunks in the revlog.
3030 if util.safehasattr(r, b'_getsegmentforrevs'):
3030 if util.safehasattr(r, b'_getsegmentforrevs'):
3031 segment = r._getsegmentforrevs(rev, rev)[1]
3031 segment = r._getsegmentforrevs(rev, rev)[1]
3032 else:
3032 else:
3033 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3033 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3034 if segment:
3034 if segment:
3035 chunktype = bytes(segment[0:1])
3035 chunktype = bytes(segment[0:1])
3036 else:
3036 else:
3037 chunktype = b'empty'
3037 chunktype = b'empty'
3038
3038
3039 if chunktype not in chunktypecounts:
3039 if chunktype not in chunktypecounts:
3040 chunktypecounts[chunktype] = 0
3040 chunktypecounts[chunktype] = 0
3041 chunktypesizes[chunktype] = 0
3041 chunktypesizes[chunktype] = 0
3042
3042
3043 chunktypecounts[chunktype] += 1
3043 chunktypecounts[chunktype] += 1
3044 chunktypesizes[chunktype] += size
3044 chunktypesizes[chunktype] += size
3045
3045
3046 # Adjust size min value for empty cases
3046 # Adjust size min value for empty cases
3047 for size in (datasize, fullsize, semisize, deltasize):
3047 for size in (datasize, fullsize, semisize, deltasize):
3048 if size[0] is None:
3048 if size[0] is None:
3049 size[0] = 0
3049 size[0] = 0
3050
3050
3051 numdeltas = numrevs - numfull - numempty - numsemi
3051 numdeltas = numrevs - numfull - numempty - numsemi
3052 numoprev = numprev - nump1prev - nump2prev
3052 numoprev = numprev - nump1prev - nump2prev
3053 totalrawsize = datasize[2]
3053 totalrawsize = datasize[2]
3054 datasize[2] /= numrevs
3054 datasize[2] /= numrevs
3055 fulltotal = fullsize[2]
3055 fulltotal = fullsize[2]
3056 if numfull == 0:
3056 if numfull == 0:
3057 fullsize[2] = 0
3057 fullsize[2] = 0
3058 else:
3058 else:
3059 fullsize[2] /= numfull
3059 fullsize[2] /= numfull
3060 semitotal = semisize[2]
3060 semitotal = semisize[2]
3061 snaptotal = {}
3061 snaptotal = {}
3062 if numsemi > 0:
3062 if numsemi > 0:
3063 semisize[2] /= numsemi
3063 semisize[2] /= numsemi
3064 for depth in snapsizedepth:
3064 for depth in snapsizedepth:
3065 snaptotal[depth] = snapsizedepth[depth][2]
3065 snaptotal[depth] = snapsizedepth[depth][2]
3066 snapsizedepth[depth][2] /= numsnapdepth[depth]
3066 snapsizedepth[depth][2] /= numsnapdepth[depth]
3067
3067
3068 deltatotal = deltasize[2]
3068 deltatotal = deltasize[2]
3069 if numdeltas > 0:
3069 if numdeltas > 0:
3070 deltasize[2] /= numdeltas
3070 deltasize[2] /= numdeltas
3071 totalsize = fulltotal + semitotal + deltatotal
3071 totalsize = fulltotal + semitotal + deltatotal
3072 avgchainlen = sum(chainlengths) / numrevs
3072 avgchainlen = sum(chainlengths) / numrevs
3073 maxchainlen = max(chainlengths)
3073 maxchainlen = max(chainlengths)
3074 maxchainspan = max(chainspans)
3074 maxchainspan = max(chainspans)
3075 compratio = 1
3075 compratio = 1
3076 if totalsize:
3076 if totalsize:
3077 compratio = totalrawsize / totalsize
3077 compratio = totalrawsize / totalsize
3078
3078
3079 basedfmtstr = b'%%%dd\n'
3079 basedfmtstr = b'%%%dd\n'
3080 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3080 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3081
3081
3082 def dfmtstr(max):
3082 def dfmtstr(max):
3083 return basedfmtstr % len(str(max))
3083 return basedfmtstr % len(str(max))
3084
3084
3085 def pcfmtstr(max, padding=0):
3085 def pcfmtstr(max, padding=0):
3086 return basepcfmtstr % (len(str(max)), b' ' * padding)
3086 return basepcfmtstr % (len(str(max)), b' ' * padding)
3087
3087
3088 def pcfmt(value, total):
3088 def pcfmt(value, total):
3089 if total:
3089 if total:
3090 return (value, 100 * float(value) / total)
3090 return (value, 100 * float(value) / total)
3091 else:
3091 else:
3092 return value, 100.0
3092 return value, 100.0
3093
3093
3094 ui.writenoi18n(b'format : %d\n' % format)
3094 ui.writenoi18n(b'format : %d\n' % format)
3095 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3095 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3096
3096
3097 ui.write(b'\n')
3097 ui.write(b'\n')
3098 fmt = pcfmtstr(totalsize)
3098 fmt = pcfmtstr(totalsize)
3099 fmt2 = dfmtstr(totalsize)
3099 fmt2 = dfmtstr(totalsize)
3100 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3100 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3101 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3101 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3102 ui.writenoi18n(
3102 ui.writenoi18n(
3103 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3103 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3104 )
3104 )
3105 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3105 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3106 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3106 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3107 ui.writenoi18n(
3107 ui.writenoi18n(
3108 b' text : '
3108 b' text : '
3109 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3109 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3110 )
3110 )
3111 ui.writenoi18n(
3111 ui.writenoi18n(
3112 b' delta : '
3112 b' delta : '
3113 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3113 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3114 )
3114 )
3115 ui.writenoi18n(
3115 ui.writenoi18n(
3116 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3116 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3117 )
3117 )
3118 for depth in sorted(numsnapdepth):
3118 for depth in sorted(numsnapdepth):
3119 ui.write(
3119 ui.write(
3120 (b' lvl-%-3d : ' % depth)
3120 (b' lvl-%-3d : ' % depth)
3121 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3121 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3122 )
3122 )
3123 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3123 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3124 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3124 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3125 ui.writenoi18n(
3125 ui.writenoi18n(
3126 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3126 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3127 )
3127 )
3128 for depth in sorted(numsnapdepth):
3128 for depth in sorted(numsnapdepth):
3129 ui.write(
3129 ui.write(
3130 (b' lvl-%-3d : ' % depth)
3130 (b' lvl-%-3d : ' % depth)
3131 + fmt % pcfmt(snaptotal[depth], totalsize)
3131 + fmt % pcfmt(snaptotal[depth], totalsize)
3132 )
3132 )
3133 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3133 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3134
3134
3135 def fmtchunktype(chunktype):
3135 def fmtchunktype(chunktype):
3136 if chunktype == b'empty':
3136 if chunktype == b'empty':
3137 return b' %s : ' % chunktype
3137 return b' %s : ' % chunktype
3138 elif chunktype in pycompat.bytestr(string.ascii_letters):
3138 elif chunktype in pycompat.bytestr(string.ascii_letters):
3139 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3139 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3140 else:
3140 else:
3141 return b' 0x%s : ' % hex(chunktype)
3141 return b' 0x%s : ' % hex(chunktype)
3142
3142
3143 ui.write(b'\n')
3143 ui.write(b'\n')
3144 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3144 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3145 for chunktype in sorted(chunktypecounts):
3145 for chunktype in sorted(chunktypecounts):
3146 ui.write(fmtchunktype(chunktype))
3146 ui.write(fmtchunktype(chunktype))
3147 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3147 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3148 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3148 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3149 for chunktype in sorted(chunktypecounts):
3149 for chunktype in sorted(chunktypecounts):
3150 ui.write(fmtchunktype(chunktype))
3150 ui.write(fmtchunktype(chunktype))
3151 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3151 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3152
3152
3153 ui.write(b'\n')
3153 ui.write(b'\n')
3154 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3154 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3155 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3155 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3156 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3156 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3157 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3157 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3158 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3158 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3159
3159
3160 if format > 0:
3160 if format > 0:
3161 ui.write(b'\n')
3161 ui.write(b'\n')
3162 ui.writenoi18n(
3162 ui.writenoi18n(
3163 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3163 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3164 % tuple(datasize)
3164 % tuple(datasize)
3165 )
3165 )
3166 ui.writenoi18n(
3166 ui.writenoi18n(
3167 b'full revision size (min/max/avg) : %d / %d / %d\n'
3167 b'full revision size (min/max/avg) : %d / %d / %d\n'
3168 % tuple(fullsize)
3168 % tuple(fullsize)
3169 )
3169 )
3170 ui.writenoi18n(
3170 ui.writenoi18n(
3171 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3171 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3172 % tuple(semisize)
3172 % tuple(semisize)
3173 )
3173 )
3174 for depth in sorted(snapsizedepth):
3174 for depth in sorted(snapsizedepth):
3175 if depth == 0:
3175 if depth == 0:
3176 continue
3176 continue
3177 ui.writenoi18n(
3177 ui.writenoi18n(
3178 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3178 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3179 % ((depth,) + tuple(snapsizedepth[depth]))
3179 % ((depth,) + tuple(snapsizedepth[depth]))
3180 )
3180 )
3181 ui.writenoi18n(
3181 ui.writenoi18n(
3182 b'delta size (min/max/avg) : %d / %d / %d\n'
3182 b'delta size (min/max/avg) : %d / %d / %d\n'
3183 % tuple(deltasize)
3183 % tuple(deltasize)
3184 )
3184 )
3185
3185
3186 if numdeltas > 0:
3186 if numdeltas > 0:
3187 ui.write(b'\n')
3187 ui.write(b'\n')
3188 fmt = pcfmtstr(numdeltas)
3188 fmt = pcfmtstr(numdeltas)
3189 fmt2 = pcfmtstr(numdeltas, 4)
3189 fmt2 = pcfmtstr(numdeltas, 4)
3190 ui.writenoi18n(
3190 ui.writenoi18n(
3191 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3191 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3192 )
3192 )
3193 if numprev > 0:
3193 if numprev > 0:
3194 ui.writenoi18n(
3194 ui.writenoi18n(
3195 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3195 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3196 )
3196 )
3197 ui.writenoi18n(
3197 ui.writenoi18n(
3198 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3198 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3199 )
3199 )
3200 ui.writenoi18n(
3200 ui.writenoi18n(
3201 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3201 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3202 )
3202 )
3203 if gdelta:
3203 if gdelta:
3204 ui.writenoi18n(
3204 ui.writenoi18n(
3205 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3205 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3206 )
3206 )
3207 ui.writenoi18n(
3207 ui.writenoi18n(
3208 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3208 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3209 )
3209 )
3210 ui.writenoi18n(
3210 ui.writenoi18n(
3211 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3211 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3212 )
3212 )
3213
3213
3214
3214
3215 @command(
3215 @command(
3216 b'debugrevlogindex',
3216 b'debugrevlogindex',
3217 cmdutil.debugrevlogopts
3217 cmdutil.debugrevlogopts
3218 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3218 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3219 _(b'[-f FORMAT] -c|-m|FILE'),
3219 _(b'[-f FORMAT] -c|-m|FILE'),
3220 optionalrepo=True,
3220 optionalrepo=True,
3221 )
3221 )
3222 def debugrevlogindex(ui, repo, file_=None, **opts):
3222 def debugrevlogindex(ui, repo, file_=None, **opts):
3223 """dump the contents of a revlog index"""
3223 """dump the contents of a revlog index"""
3224 opts = pycompat.byteskwargs(opts)
3224 opts = pycompat.byteskwargs(opts)
3225 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3225 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3226 format = opts.get(b'format', 0)
3226 format = opts.get(b'format', 0)
3227 if format not in (0, 1):
3227 if format not in (0, 1):
3228 raise error.Abort(_(b"unknown format %d") % format)
3228 raise error.Abort(_(b"unknown format %d") % format)
3229
3229
3230 if ui.debugflag:
3230 if ui.debugflag:
3231 shortfn = hex
3231 shortfn = hex
3232 else:
3232 else:
3233 shortfn = short
3233 shortfn = short
3234
3234
3235 # There might not be anything in r, so have a sane default
3235 # There might not be anything in r, so have a sane default
3236 idlen = 12
3236 idlen = 12
3237 for i in r:
3237 for i in r:
3238 idlen = len(shortfn(r.node(i)))
3238 idlen = len(shortfn(r.node(i)))
3239 break
3239 break
3240
3240
3241 if format == 0:
3241 if format == 0:
3242 if ui.verbose:
3242 if ui.verbose:
3243 ui.writenoi18n(
3243 ui.writenoi18n(
3244 b" rev offset length linkrev %s %s p2\n"
3244 b" rev offset length linkrev %s %s p2\n"
3245 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3245 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3246 )
3246 )
3247 else:
3247 else:
3248 ui.writenoi18n(
3248 ui.writenoi18n(
3249 b" rev linkrev %s %s p2\n"
3249 b" rev linkrev %s %s p2\n"
3250 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3250 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3251 )
3251 )
3252 elif format == 1:
3252 elif format == 1:
3253 if ui.verbose:
3253 if ui.verbose:
3254 ui.writenoi18n(
3254 ui.writenoi18n(
3255 (
3255 (
3256 b" rev flag offset length size link p1"
3256 b" rev flag offset length size link p1"
3257 b" p2 %s\n"
3257 b" p2 %s\n"
3258 )
3258 )
3259 % b"nodeid".rjust(idlen)
3259 % b"nodeid".rjust(idlen)
3260 )
3260 )
3261 else:
3261 else:
3262 ui.writenoi18n(
3262 ui.writenoi18n(
3263 b" rev flag size link p1 p2 %s\n"
3263 b" rev flag size link p1 p2 %s\n"
3264 % b"nodeid".rjust(idlen)
3264 % b"nodeid".rjust(idlen)
3265 )
3265 )
3266
3266
3267 for i in r:
3267 for i in r:
3268 node = r.node(i)
3268 node = r.node(i)
3269 if format == 0:
3269 if format == 0:
3270 try:
3270 try:
3271 pp = r.parents(node)
3271 pp = r.parents(node)
3272 except Exception:
3272 except Exception:
3273 pp = [nullid, nullid]
3273 pp = [nullid, nullid]
3274 if ui.verbose:
3274 if ui.verbose:
3275 ui.write(
3275 ui.write(
3276 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3276 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3277 % (
3277 % (
3278 i,
3278 i,
3279 r.start(i),
3279 r.start(i),
3280 r.length(i),
3280 r.length(i),
3281 r.linkrev(i),
3281 r.linkrev(i),
3282 shortfn(node),
3282 shortfn(node),
3283 shortfn(pp[0]),
3283 shortfn(pp[0]),
3284 shortfn(pp[1]),
3284 shortfn(pp[1]),
3285 )
3285 )
3286 )
3286 )
3287 else:
3287 else:
3288 ui.write(
3288 ui.write(
3289 b"% 6d % 7d %s %s %s\n"
3289 b"% 6d % 7d %s %s %s\n"
3290 % (
3290 % (
3291 i,
3291 i,
3292 r.linkrev(i),
3292 r.linkrev(i),
3293 shortfn(node),
3293 shortfn(node),
3294 shortfn(pp[0]),
3294 shortfn(pp[0]),
3295 shortfn(pp[1]),
3295 shortfn(pp[1]),
3296 )
3296 )
3297 )
3297 )
3298 elif format == 1:
3298 elif format == 1:
3299 pr = r.parentrevs(i)
3299 pr = r.parentrevs(i)
3300 if ui.verbose:
3300 if ui.verbose:
3301 ui.write(
3301 ui.write(
3302 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3302 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3303 % (
3303 % (
3304 i,
3304 i,
3305 r.flags(i),
3305 r.flags(i),
3306 r.start(i),
3306 r.start(i),
3307 r.length(i),
3307 r.length(i),
3308 r.rawsize(i),
3308 r.rawsize(i),
3309 r.linkrev(i),
3309 r.linkrev(i),
3310 pr[0],
3310 pr[0],
3311 pr[1],
3311 pr[1],
3312 shortfn(node),
3312 shortfn(node),
3313 )
3313 )
3314 )
3314 )
3315 else:
3315 else:
3316 ui.write(
3316 ui.write(
3317 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3317 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3318 % (
3318 % (
3319 i,
3319 i,
3320 r.flags(i),
3320 r.flags(i),
3321 r.rawsize(i),
3321 r.rawsize(i),
3322 r.linkrev(i),
3322 r.linkrev(i),
3323 pr[0],
3323 pr[0],
3324 pr[1],
3324 pr[1],
3325 shortfn(node),
3325 shortfn(node),
3326 )
3326 )
3327 )
3327 )
3328
3328
3329
3329
3330 @command(
3330 @command(
3331 b'debugrevspec',
3331 b'debugrevspec',
3332 [
3332 [
3333 (
3333 (
3334 b'',
3334 b'',
3335 b'optimize',
3335 b'optimize',
3336 None,
3336 None,
3337 _(b'print parsed tree after optimizing (DEPRECATED)'),
3337 _(b'print parsed tree after optimizing (DEPRECATED)'),
3338 ),
3338 ),
3339 (
3339 (
3340 b'',
3340 b'',
3341 b'show-revs',
3341 b'show-revs',
3342 True,
3342 True,
3343 _(b'print list of result revisions (default)'),
3343 _(b'print list of result revisions (default)'),
3344 ),
3344 ),
3345 (
3345 (
3346 b's',
3346 b's',
3347 b'show-set',
3347 b'show-set',
3348 None,
3348 None,
3349 _(b'print internal representation of result set'),
3349 _(b'print internal representation of result set'),
3350 ),
3350 ),
3351 (
3351 (
3352 b'p',
3352 b'p',
3353 b'show-stage',
3353 b'show-stage',
3354 [],
3354 [],
3355 _(b'print parsed tree at the given stage'),
3355 _(b'print parsed tree at the given stage'),
3356 _(b'NAME'),
3356 _(b'NAME'),
3357 ),
3357 ),
3358 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3358 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3359 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3359 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3360 ],
3360 ],
3361 b'REVSPEC',
3361 b'REVSPEC',
3362 )
3362 )
3363 def debugrevspec(ui, repo, expr, **opts):
3363 def debugrevspec(ui, repo, expr, **opts):
3364 """parse and apply a revision specification
3364 """parse and apply a revision specification
3365
3365
3366 Use -p/--show-stage option to print the parsed tree at the given stages.
3366 Use -p/--show-stage option to print the parsed tree at the given stages.
3367 Use -p all to print the tree at every stage.
3367 Use -p all to print the tree at every stage.
3368
3368
3369 Use --no-show-revs option with -s or -p to print only the set
3369 Use --no-show-revs option with -s or -p to print only the set
3370 representation or the parsed tree respectively.
3370 representation or the parsed tree respectively.
3371
3371
3372 Use --verify-optimized to compare the optimized result with the unoptimized
3372 Use --verify-optimized to compare the optimized result with the unoptimized
3373 one. Returns 1 if the optimized result differs.
3373 one. Returns 1 if the optimized result differs.
3374 """
3374 """
3375 opts = pycompat.byteskwargs(opts)
3375 opts = pycompat.byteskwargs(opts)
3376 aliases = ui.configitems(b'revsetalias')
3376 aliases = ui.configitems(b'revsetalias')
3377 stages = [
3377 stages = [
3378 (b'parsed', lambda tree: tree),
3378 (b'parsed', lambda tree: tree),
3379 (
3379 (
3380 b'expanded',
3380 b'expanded',
3381 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3381 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3382 ),
3382 ),
3383 (b'concatenated', revsetlang.foldconcat),
3383 (b'concatenated', revsetlang.foldconcat),
3384 (b'analyzed', revsetlang.analyze),
3384 (b'analyzed', revsetlang.analyze),
3385 (b'optimized', revsetlang.optimize),
3385 (b'optimized', revsetlang.optimize),
3386 ]
3386 ]
3387 if opts[b'no_optimized']:
3387 if opts[b'no_optimized']:
3388 stages = stages[:-1]
3388 stages = stages[:-1]
3389 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3389 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3390 raise error.Abort(
3390 raise error.Abort(
3391 _(b'cannot use --verify-optimized with --no-optimized')
3391 _(b'cannot use --verify-optimized with --no-optimized')
3392 )
3392 )
3393 stagenames = {n for n, f in stages}
3393 stagenames = {n for n, f in stages}
3394
3394
3395 showalways = set()
3395 showalways = set()
3396 showchanged = set()
3396 showchanged = set()
3397 if ui.verbose and not opts[b'show_stage']:
3397 if ui.verbose and not opts[b'show_stage']:
3398 # show parsed tree by --verbose (deprecated)
3398 # show parsed tree by --verbose (deprecated)
3399 showalways.add(b'parsed')
3399 showalways.add(b'parsed')
3400 showchanged.update([b'expanded', b'concatenated'])
3400 showchanged.update([b'expanded', b'concatenated'])
3401 if opts[b'optimize']:
3401 if opts[b'optimize']:
3402 showalways.add(b'optimized')
3402 showalways.add(b'optimized')
3403 if opts[b'show_stage'] and opts[b'optimize']:
3403 if opts[b'show_stage'] and opts[b'optimize']:
3404 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3404 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3405 if opts[b'show_stage'] == [b'all']:
3405 if opts[b'show_stage'] == [b'all']:
3406 showalways.update(stagenames)
3406 showalways.update(stagenames)
3407 else:
3407 else:
3408 for n in opts[b'show_stage']:
3408 for n in opts[b'show_stage']:
3409 if n not in stagenames:
3409 if n not in stagenames:
3410 raise error.Abort(_(b'invalid stage name: %s') % n)
3410 raise error.Abort(_(b'invalid stage name: %s') % n)
3411 showalways.update(opts[b'show_stage'])
3411 showalways.update(opts[b'show_stage'])
3412
3412
3413 treebystage = {}
3413 treebystage = {}
3414 printedtree = None
3414 printedtree = None
3415 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3415 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3416 for n, f in stages:
3416 for n, f in stages:
3417 treebystage[n] = tree = f(tree)
3417 treebystage[n] = tree = f(tree)
3418 if n in showalways or (n in showchanged and tree != printedtree):
3418 if n in showalways or (n in showchanged and tree != printedtree):
3419 if opts[b'show_stage'] or n != b'parsed':
3419 if opts[b'show_stage'] or n != b'parsed':
3420 ui.write(b"* %s:\n" % n)
3420 ui.write(b"* %s:\n" % n)
3421 ui.write(revsetlang.prettyformat(tree), b"\n")
3421 ui.write(revsetlang.prettyformat(tree), b"\n")
3422 printedtree = tree
3422 printedtree = tree
3423
3423
3424 if opts[b'verify_optimized']:
3424 if opts[b'verify_optimized']:
3425 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3425 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3426 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3426 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3427 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3427 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3428 ui.writenoi18n(
3428 ui.writenoi18n(
3429 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3429 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3430 )
3430 )
3431 ui.writenoi18n(
3431 ui.writenoi18n(
3432 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3432 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3433 )
3433 )
3434 arevs = list(arevs)
3434 arevs = list(arevs)
3435 brevs = list(brevs)
3435 brevs = list(brevs)
3436 if arevs == brevs:
3436 if arevs == brevs:
3437 return 0
3437 return 0
3438 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3438 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3439 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3439 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3440 sm = difflib.SequenceMatcher(None, arevs, brevs)
3440 sm = difflib.SequenceMatcher(None, arevs, brevs)
3441 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3441 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3442 if tag in ('delete', 'replace'):
3442 if tag in ('delete', 'replace'):
3443 for c in arevs[alo:ahi]:
3443 for c in arevs[alo:ahi]:
3444 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3444 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3445 if tag in ('insert', 'replace'):
3445 if tag in ('insert', 'replace'):
3446 for c in brevs[blo:bhi]:
3446 for c in brevs[blo:bhi]:
3447 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3447 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3448 if tag == 'equal':
3448 if tag == 'equal':
3449 for c in arevs[alo:ahi]:
3449 for c in arevs[alo:ahi]:
3450 ui.write(b' %d\n' % c)
3450 ui.write(b' %d\n' % c)
3451 return 1
3451 return 1
3452
3452
3453 func = revset.makematcher(tree)
3453 func = revset.makematcher(tree)
3454 revs = func(repo)
3454 revs = func(repo)
3455 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3455 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3456 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3456 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3457 if not opts[b'show_revs']:
3457 if not opts[b'show_revs']:
3458 return
3458 return
3459 for c in revs:
3459 for c in revs:
3460 ui.write(b"%d\n" % c)
3460 ui.write(b"%d\n" % c)
3461
3461
3462
3462
3463 @command(
3463 @command(
3464 b'debugserve',
3464 b'debugserve',
3465 [
3465 [
3466 (
3466 (
3467 b'',
3467 b'',
3468 b'sshstdio',
3468 b'sshstdio',
3469 False,
3469 False,
3470 _(b'run an SSH server bound to process handles'),
3470 _(b'run an SSH server bound to process handles'),
3471 ),
3471 ),
3472 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3472 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3473 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3473 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3474 ],
3474 ],
3475 b'',
3475 b'',
3476 )
3476 )
3477 def debugserve(ui, repo, **opts):
3477 def debugserve(ui, repo, **opts):
3478 """run a server with advanced settings
3478 """run a server with advanced settings
3479
3479
3480 This command is similar to :hg:`serve`. It exists partially as a
3480 This command is similar to :hg:`serve`. It exists partially as a
3481 workaround for the fact that ``hg serve --stdio`` must have specific
3481 workaround for the fact that ``hg serve --stdio`` must have specific
3482 arguments for security reasons.
3482 arguments for security reasons.
3483 """
3483 """
3484 opts = pycompat.byteskwargs(opts)
3484 opts = pycompat.byteskwargs(opts)
3485
3485
3486 if not opts[b'sshstdio']:
3486 if not opts[b'sshstdio']:
3487 raise error.Abort(_(b'only --sshstdio is currently supported'))
3487 raise error.Abort(_(b'only --sshstdio is currently supported'))
3488
3488
3489 logfh = None
3489 logfh = None
3490
3490
3491 if opts[b'logiofd'] and opts[b'logiofile']:
3491 if opts[b'logiofd'] and opts[b'logiofile']:
3492 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3492 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3493
3493
3494 if opts[b'logiofd']:
3494 if opts[b'logiofd']:
3495 # Ideally we would be line buffered. But line buffering in binary
3495 # Ideally we would be line buffered. But line buffering in binary
3496 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3496 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3497 # buffering could have performance impacts. But since this isn't
3497 # buffering could have performance impacts. But since this isn't
3498 # performance critical code, it should be fine.
3498 # performance critical code, it should be fine.
3499 try:
3499 try:
3500 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3500 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3501 except OSError as e:
3501 except OSError as e:
3502 if e.errno != errno.ESPIPE:
3502 if e.errno != errno.ESPIPE:
3503 raise
3503 raise
3504 # can't seek a pipe, so `ab` mode fails on py3
3504 # can't seek a pipe, so `ab` mode fails on py3
3505 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3505 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3506 elif opts[b'logiofile']:
3506 elif opts[b'logiofile']:
3507 logfh = open(opts[b'logiofile'], b'ab', 0)
3507 logfh = open(opts[b'logiofile'], b'ab', 0)
3508
3508
3509 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3509 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3510 s.serve_forever()
3510 s.serve_forever()
3511
3511
3512
3512
3513 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3513 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3514 def debugsetparents(ui, repo, rev1, rev2=None):
3514 def debugsetparents(ui, repo, rev1, rev2=None):
3515 """manually set the parents of the current working directory (DANGEROUS)
3515 """manually set the parents of the current working directory (DANGEROUS)
3516
3516
3517 This command is not what you are looking for and should not be used. Using
3517 This command is not what you are looking for and should not be used. Using
3518 this command will most certainly result in slight corruption of the file
3518 this command will most certainly result in slight corruption of the file
3519 level histories within your repository. DO NOT USE THIS COMMAND.
3519 level histories within your repository. DO NOT USE THIS COMMAND.
3520
3520
3521 The command updates the p1 and p2 fields in the dirstate without touching
3521 The command updates the p1 and p2 fields in the dirstate without touching
3522 anything else. This is useful for writing repository conversion tools, but
3522 anything else. This is useful for writing repository conversion tools, but
3523 should be used with extreme care. For example, neither the working
3523 should be used with extreme care. For example, neither the working
3524 directory nor the dirstate is updated, so file status may be incorrect
3524 directory nor the dirstate is updated, so file status may be incorrect
3525 after running this command. Only use it if you are one of the few people who
3525 after running this command. Only use it if you are one of the few people who
3526 deeply understand both conversion tools and file level histories. If you are
3526 deeply understand both conversion tools and file level histories. If you are
3527 reading this help, you are not one of those people (most of them sailed west
3527 reading this help, you are not one of those people (most of them sailed west
3528 from Mithlond anyway).
3528 from Mithlond anyway).
3529
3529
3530 So one last time DO NOT USE THIS COMMAND.
3530 So one last time DO NOT USE THIS COMMAND.
3531
3531
3532 Returns 0 on success.
3532 Returns 0 on success.
3533 """
3533 """
3534
3534
3535 node1 = scmutil.revsingle(repo, rev1).node()
3535 node1 = scmutil.revsingle(repo, rev1).node()
3536 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3536 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3537
3537
3538 with repo.wlock():
3538 with repo.wlock():
3539 repo.setparents(node1, node2)
3539 repo.setparents(node1, node2)
3540
3540
3541
3541
3542 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3542 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3543 def debugsidedata(ui, repo, file_, rev=None, **opts):
3543 def debugsidedata(ui, repo, file_, rev=None, **opts):
3544 """dump the side data for a cl/manifest/file revision
3544 """dump the side data for a cl/manifest/file revision
3545
3545
3546 Use --verbose to dump the sidedata content."""
3546 Use --verbose to dump the sidedata content."""
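# Illustrative invocations, following the -c|-m|FILE REV synopsis (file path hypothetical):
#   hg debugsidedata -c 0             # sidedata of changelog revision 0
#   hg debugsidedata -m 0             # sidedata of manifest revision 0
#   hg debugsidedata some/file.txt 0  # sidedata of a file revision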
3547 opts = pycompat.byteskwargs(opts)
3547 opts = pycompat.byteskwargs(opts)
3548 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3548 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3549 if rev is not None:
3549 if rev is not None:
3550 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3550 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3551 file_, rev = None, file_
3551 file_, rev = None, file_
3552 elif rev is None:
3552 elif rev is None:
3553 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3553 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3554 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3554 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3555 r = getattr(r, '_revlog', r)
3555 r = getattr(r, '_revlog', r)
3556 try:
3556 try:
3557 sidedata = r.sidedata(r.lookup(rev))
3557 sidedata = r.sidedata(r.lookup(rev))
3558 except KeyError:
3558 except KeyError:
3559 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3559 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3560 if sidedata:
3560 if sidedata:
3561 sidedata = list(sidedata.items())
3561 sidedata = list(sidedata.items())
3562 sidedata.sort()
3562 sidedata.sort()
3563 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3563 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3564 for key, value in sidedata:
3564 for key, value in sidedata:
3565 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3565 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3566 if ui.verbose:
3566 if ui.verbose:
3567 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3567 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3568
3568
3569
3569
3570 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3570 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3571 def debugssl(ui, repo, source=None, **opts):
3571 def debugssl(ui, repo, source=None, **opts):
3572 """test a secure connection to a server
3572 """test a secure connection to a server
3573
3573
3574 This builds the certificate chain for the server on Windows, installing the
3574 This builds the certificate chain for the server on Windows, installing the
3575 missing intermediates and trusted root via Windows Update if necessary. It
3575 missing intermediates and trusted root via Windows Update if necessary. It
3576 does nothing on other platforms.
3576 does nothing on other platforms.
3577
3577
3578 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3578 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3579 that server is used. See :hg:`help urls` for more information.
3579 that server is used. See :hg:`help urls` for more information.
3580
3580
3581 If the update succeeds, retry the original operation. Otherwise, the cause
3581 If the update succeeds, retry the original operation. Otherwise, the cause
3582 of the SSL error is likely another issue.
3582 of the SSL error is likely another issue.
3583 """
3583 """
3584 if not pycompat.iswindows:
3584 if not pycompat.iswindows:
3585 raise error.Abort(
3585 raise error.Abort(
3586 _(b'certificate chain building is only possible on Windows')
3586 _(b'certificate chain building is only possible on Windows')
3587 )
3587 )
3588
3588
3589 if not source:
3589 if not source:
3590 if not repo:
3590 if not repo:
3591 raise error.Abort(
3591 raise error.Abort(
3592 _(
3592 _(
3593 b"there is no Mercurial repository here, and no "
3593 b"there is no Mercurial repository here, and no "
3594 b"server specified"
3594 b"server specified"
3595 )
3595 )
3596 )
3596 )
3597 source = b"default"
3597 source = b"default"
3598
3598
3599 source, branches = hg.parseurl(ui.expandpath(source))
3599 source, branches = hg.parseurl(ui.expandpath(source))
3600 url = util.url(source)
3600 url = util.url(source)
3601
3601
3602 defaultport = {b'https': 443, b'ssh': 22}
3602 defaultport = {b'https': 443, b'ssh': 22}
3603 if url.scheme in defaultport:
3603 if url.scheme in defaultport:
3604 try:
3604 try:
3605 addr = (url.host, int(url.port or defaultport[url.scheme]))
3605 addr = (url.host, int(url.port or defaultport[url.scheme]))
3606 except ValueError:
3606 except ValueError:
3607 raise error.Abort(_(b"malformed port number in URL"))
3607 raise error.Abort(_(b"malformed port number in URL"))
3608 else:
3608 else:
3609 raise error.Abort(_(b"only https and ssh connections are supported"))
3609 raise error.Abort(_(b"only https and ssh connections are supported"))
3610
3610
3611 from . import win32
3611 from . import win32
3612
3612
3613 s = ssl.wrap_socket(
3613 s = ssl.wrap_socket(
3614 socket.socket(),
3614 socket.socket(),
3615 ssl_version=ssl.PROTOCOL_TLS,
3615 ssl_version=ssl.PROTOCOL_TLS,
3616 cert_reqs=ssl.CERT_NONE,
3616 cert_reqs=ssl.CERT_NONE,
3617 ca_certs=None,
3617 ca_certs=None,
3618 )
3618 )
3619
3619
3620 try:
3620 try:
3621 s.connect(addr)
3621 s.connect(addr)
3622 cert = s.getpeercert(True)
3622 cert = s.getpeercert(True)
3623
3623
3624 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3624 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3625
3625
3626 complete = win32.checkcertificatechain(cert, build=False)
3626 complete = win32.checkcertificatechain(cert, build=False)
3627
3627
3628 if not complete:
3628 if not complete:
3629 ui.status(_(b'certificate chain is incomplete, updating... '))
3629 ui.status(_(b'certificate chain is incomplete, updating... '))
3630
3630
3631 if not win32.checkcertificatechain(cert):
3631 if not win32.checkcertificatechain(cert):
3632 ui.status(_(b'failed.\n'))
3632 ui.status(_(b'failed.\n'))
3633 else:
3633 else:
3634 ui.status(_(b'done.\n'))
3634 ui.status(_(b'done.\n'))
3635 else:
3635 else:
3636 ui.status(_(b'full certificate chain is available\n'))
3636 ui.status(_(b'full certificate chain is available\n'))
3637 finally:
3637 finally:
3638 s.close()
3638 s.close()
3639
3639
3640
3640
3641 @command(
3641 @command(
3642 b"debugbackupbundle",
3642 b"debugbackupbundle",
3643 [
3643 [
3644 (
3644 (
3645 b"",
3645 b"",
3646 b"recover",
3646 b"recover",
3647 b"",
3647 b"",
3648 b"brings the specified changeset back into the repository",
3648 b"brings the specified changeset back into the repository",
3649 )
3649 )
3650 ]
3650 ]
3651 + cmdutil.logopts,
3651 + cmdutil.logopts,
3652 _(b"hg debugbackupbundle [--recover HASH]"),
3652 _(b"hg debugbackupbundle [--recover HASH]"),
3653 )
3653 )
3654 def debugbackupbundle(ui, repo, *pats, **opts):
3654 def debugbackupbundle(ui, repo, *pats, **opts):
3655 """lists the changesets available in backup bundles
3655 """lists the changesets available in backup bundles
3656
3656
3657 Without any arguments, this command prints a list of the changesets in each
3657 Without any arguments, this command prints a list of the changesets in each
3658 backup bundle.
3658 backup bundle.
3659
3659
3660 --recover takes a changeset hash and unbundles the first bundle that
3660 --recover takes a changeset hash and unbundles the first bundle that
3661 contains that hash, which puts that changeset back in your repository.
3661 contains that hash, which puts that changeset back in your repository.
3662
3662
3663 --verbose will print the entire commit message and the bundle path for that
3663 --verbose will print the entire commit message and the bundle path for that
3664 backup.
3664 backup.
3665 """
3665 """
3666 backups = list(
3666 backups = list(
3667 filter(
3667 filter(
3668 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3668 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3669 )
3669 )
3670 )
3670 )
3671 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3671 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3672
3672
3673 opts = pycompat.byteskwargs(opts)
3673 opts = pycompat.byteskwargs(opts)
3674 opts[b"bundle"] = b""
3674 opts[b"bundle"] = b""
3675 opts[b"force"] = None
3675 opts[b"force"] = None
3676 limit = logcmdutil.getlimit(opts)
3676 limit = logcmdutil.getlimit(opts)
3677
3677
3678 def display(other, chlist, displayer):
3678 def display(other, chlist, displayer):
3679 if opts.get(b"newest_first"):
3679 if opts.get(b"newest_first"):
3680 chlist.reverse()
3680 chlist.reverse()
3681 count = 0
3681 count = 0
3682 for n in chlist:
3682 for n in chlist:
3683 if limit is not None and count >= limit:
3683 if limit is not None and count >= limit:
3684 break
3684 break
3685 parents = [True for p in other.changelog.parents(n) if p != nullid]
3685 parents = [True for p in other.changelog.parents(n) if p != nullid]
3686 if opts.get(b"no_merges") and len(parents) == 2:
3686 if opts.get(b"no_merges") and len(parents) == 2:
3687 continue
3687 continue
3688 count += 1
3688 count += 1
3689 displayer.show(other[n])
3689 displayer.show(other[n])
3690
3690
3691 recovernode = opts.get(b"recover")
3691 recovernode = opts.get(b"recover")
3692 if recovernode:
3692 if recovernode:
3693 if scmutil.isrevsymbol(repo, recovernode):
3693 if scmutil.isrevsymbol(repo, recovernode):
3694 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3694 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3695 return
3695 return
3696 elif backups:
3696 elif backups:
3697 msg = _(
3697 msg = _(
3698 b"Recover changesets using: hg debugbackupbundle --recover "
3698 b"Recover changesets using: hg debugbackupbundle --recover "
3699 b"<changeset hash>\n\nAvailable backup changesets:"
3699 b"<changeset hash>\n\nAvailable backup changesets:"
3700 )
3700 )
3701 ui.status(msg, label=b"status.removed")
3701 ui.status(msg, label=b"status.removed")
3702 else:
3702 else:
3703 ui.status(_(b"no backup changesets found\n"))
3703 ui.status(_(b"no backup changesets found\n"))
3704 return
3704 return
3705
3705
3706 for backup in backups:
3706 for backup in backups:
3707 # Much of this is copied from the hg incoming logic
3707 # Much of this is copied from the hg incoming logic
3708 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3708 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3709 source, branches = hg.parseurl(source, opts.get(b"branch"))
3709 source, branches = hg.parseurl(source, opts.get(b"branch"))
3710 try:
3710 try:
3711 other = hg.peer(repo, opts, source)
3711 other = hg.peer(repo, opts, source)
3712 except error.LookupError as ex:
3712 except error.LookupError as ex:
3713 msg = _(b"\nwarning: unable to open bundle %s") % source
3713 msg = _(b"\nwarning: unable to open bundle %s") % source
3714 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3714 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3715 ui.warn(msg, hint=hint)
3715 ui.warn(msg, hint=hint)
3716 continue
3716 continue
3717 revs, checkout = hg.addbranchrevs(
3717 revs, checkout = hg.addbranchrevs(
3718 repo, other, branches, opts.get(b"rev")
3718 repo, other, branches, opts.get(b"rev")
3719 )
3719 )
3720
3720
3721 if revs:
3721 if revs:
3722 revs = [other.lookup(rev) for rev in revs]
3722 revs = [other.lookup(rev) for rev in revs]
3723
3723
3724 quiet = ui.quiet
3724 quiet = ui.quiet
3725 try:
3725 try:
3726 ui.quiet = True
3726 ui.quiet = True
3727 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3727 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3728 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3728 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3729 )
3729 )
3730 except error.LookupError:
3730 except error.LookupError:
3731 continue
3731 continue
3732 finally:
3732 finally:
3733 ui.quiet = quiet
3733 ui.quiet = quiet
3734
3734
3735 try:
3735 try:
3736 if not chlist:
3736 if not chlist:
3737 continue
3737 continue
3738 if recovernode:
3738 if recovernode:
3739 with repo.lock(), repo.transaction(b"unbundle") as tr:
3739 with repo.lock(), repo.transaction(b"unbundle") as tr:
3740 if scmutil.isrevsymbol(other, recovernode):
3740 if scmutil.isrevsymbol(other, recovernode):
3741 ui.status(_(b"Unbundling %s\n") % (recovernode))
3741 ui.status(_(b"Unbundling %s\n") % (recovernode))
3742 f = hg.openpath(ui, source)
3742 f = hg.openpath(ui, source)
3743 gen = exchange.readbundle(ui, f, source)
3743 gen = exchange.readbundle(ui, f, source)
3744 if isinstance(gen, bundle2.unbundle20):
3744 if isinstance(gen, bundle2.unbundle20):
3745 bundle2.applybundle(
3745 bundle2.applybundle(
3746 repo,
3746 repo,
3747 gen,
3747 gen,
3748 tr,
3748 tr,
3749 source=b"unbundle",
3749 source=b"unbundle",
3750 url=b"bundle:" + source,
3750 url=b"bundle:" + source,
3751 )
3751 )
3752 else:
3752 else:
3753 gen.apply(repo, b"unbundle", b"bundle:" + source)
3753 gen.apply(repo, b"unbundle", b"bundle:" + source)
3754 break
3754 break
3755 else:
3755 else:
3756 backupdate = encoding.strtolocal(
3756 backupdate = encoding.strtolocal(
3757 time.strftime(
3757 time.strftime(
3758 "%a %H:%M, %Y-%m-%d",
3758 "%a %H:%M, %Y-%m-%d",
3759 time.localtime(os.path.getmtime(source)),
3759 time.localtime(os.path.getmtime(source)),
3760 )
3760 )
3761 )
3761 )
3762 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3762 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3763 if ui.verbose:
3763 if ui.verbose:
3764 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3764 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3765 else:
3765 else:
3766 opts[
3766 opts[
3767 b"template"
3767 b"template"
3768 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3768 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3769 displayer = logcmdutil.changesetdisplayer(
3769 displayer = logcmdutil.changesetdisplayer(
3770 ui, other, opts, False
3770 ui, other, opts, False
3771 )
3771 )
3772 display(other, chlist, displayer)
3772 display(other, chlist, displayer)
3773 displayer.close()
3773 displayer.close()
3774 finally:
3774 finally:
3775 cleanupfn()
3775 cleanupfn()
3776
3776
3777
3777
3778 @command(
3778 @command(
3779 b'debugsub',
3779 b'debugsub',
3780 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3780 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3781 _(b'[-r REV] [REV]'),
3781 _(b'[-r REV] [REV]'),
3782 )
3782 )
3783 def debugsub(ui, repo, rev=None):
3783 def debugsub(ui, repo, rev=None):
3784 ctx = scmutil.revsingle(repo, rev, None)
3784 ctx = scmutil.revsingle(repo, rev, None)
3785 for k, v in sorted(ctx.substate.items()):
3785 for k, v in sorted(ctx.substate.items()):
3786 ui.writenoi18n(b'path %s\n' % k)
3786 ui.writenoi18n(b'path %s\n' % k)
3787 ui.writenoi18n(b' source %s\n' % v[0])
3787 ui.writenoi18n(b' source %s\n' % v[0])
3788 ui.writenoi18n(b' revision %s\n' % v[1])
3788 ui.writenoi18n(b' revision %s\n' % v[1])
3789
3789
3790
3790
3791 @command(b'debugshell', optionalrepo=True)
3791 @command(b'debugshell', optionalrepo=True)
3792 def debugshell(ui, repo):
3792 def debugshell(ui, repo):
3793 """run an interactive Python interpreter
3793 """run an interactive Python interpreter
3794
3794
3795 The local namespace is provided with a reference to the ui and
3795 The local namespace is provided with a reference to the ui and
3796 the repo instance (if available).
3796 the repo instance (if available).
3797 """
3797 """
3798 import code
3798 import code
3799
3799
3800 imported_objects = {
3800 imported_objects = {
3801 'ui': ui,
3801 'ui': ui,
3802 'repo': repo,
3802 'repo': repo,
3803 }
3803 }
3804
3804
3805 code.interact(local=imported_objects)
3805 code.interact(local=imported_objects)
3806
3806
3807
3807
3808 @command(
3808 @command(
3809 b'debugsuccessorssets',
3809 b'debugsuccessorssets',
3810 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3810 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3811 _(b'[REV]'),
3811 _(b'[REV]'),
3812 )
3812 )
3813 def debugsuccessorssets(ui, repo, *revs, **opts):
3813 def debugsuccessorssets(ui, repo, *revs, **opts):
3814 """show set of successors for revision
3814 """show set of successors for revision
3815
3815
3816 A successors set of changeset A is a consistent group of revisions that
3816 A successors set of changeset A is a consistent group of revisions that
3817 succeed A. It contains only non-obsolete changesets, unless the closest
3817 succeed A. It contains only non-obsolete changesets, unless the closest
3818 successors sets are requested (see the --closest option).
3818 successors sets are requested (see the --closest option).
3819
3819
3820 In most cases a changeset A has a single successors set containing a single
3820 In most cases a changeset A has a single successors set containing a single
3821 successor (changeset A replaced by A').
3821 successor (changeset A replaced by A').
3822
3822
3823 A changeset that is made obsolete with no successors is called "pruned".
3823 A changeset that is made obsolete with no successors is called "pruned".
3824 Such changesets have no successors sets at all.
3824 Such changesets have no successors sets at all.
3825
3825
3826 A changeset that has been "split" will have a successors set containing
3826 A changeset that has been "split" will have a successors set containing
3827 more than one successor.
3827 more than one successor.
3828
3828
3829 A changeset that has been rewritten in multiple different ways is called
3829 A changeset that has been rewritten in multiple different ways is called
3830 "divergent". Such changesets have multiple successor sets (each of which
3830 "divergent". Such changesets have multiple successor sets (each of which
3831 may also be split, i.e. have multiple successors).
3831 may also be split, i.e. have multiple successors).
3832
3832
3833 Results are displayed as follows::
3833 Results are displayed as follows::
3834
3834
3835 <rev1>
3835 <rev1>
3836 <successors-1A>
3836 <successors-1A>
3837 <rev2>
3837 <rev2>
3838 <successors-2A>
3838 <successors-2A>
3839 <successors-2B1> <successors-2B2> <successors-2B3>
3839 <successors-2B1> <successors-2B2> <successors-2B3>
3840
3840
3841 Here rev2 has two possible (i.e. divergent) successors sets. The first
3841 Here rev2 has two possible (i.e. divergent) successors sets. The first
3842 holds one element, whereas the second holds three (i.e. the changeset has
3842 holds one element, whereas the second holds three (i.e. the changeset has
3843 been split).
3843 been split).
3844 """
3844 """
3845 # passed to successorssets caching computation from one call to another
3845 # passed to successorssets caching computation from one call to another
3846 cache = {}
3846 cache = {}
3847 ctx2str = bytes
3847 ctx2str = bytes
3848 node2str = short
3848 node2str = short
3849 for rev in scmutil.revrange(repo, revs):
3849 for rev in scmutil.revrange(repo, revs):
3850 ctx = repo[rev]
3850 ctx = repo[rev]
3851 ui.write(b'%s\n' % ctx2str(ctx))
3851 ui.write(b'%s\n' % ctx2str(ctx))
3852 for succsset in obsutil.successorssets(
3852 for succsset in obsutil.successorssets(
3853 repo, ctx.node(), closest=opts['closest'], cache=cache
3853 repo, ctx.node(), closest=opts['closest'], cache=cache
3854 ):
3854 ):
3855 if succsset:
3855 if succsset:
3856 ui.write(b' ')
3856 ui.write(b' ')
3857 ui.write(node2str(succsset[0]))
3857 ui.write(node2str(succsset[0]))
3858 for node in succsset[1:]:
3858 for node in succsset[1:]:
3859 ui.write(b' ')
3859 ui.write(b' ')
3860 ui.write(node2str(node))
3860 ui.write(node2str(node))
3861 ui.write(b'\n')
3861 ui.write(b'\n')
3862
3862
3863
3863
3864 @command(b'debugtagscache', [])
3864 @command(b'debugtagscache', [])
3865 def debugtagscache(ui, repo):
3865 def debugtagscache(ui, repo):
3866 """display the contents of .hg/cache/hgtagsfnodes1"""
3866 """display the contents of .hg/cache/hgtagsfnodes1"""
3867 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3867 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3868 flog = repo.file(b'.hgtags')
3868 for r in repo:
3869 for r in repo:
3869 node = repo[r].node()
3870 node = repo[r].node()
3870 tagsnode = cache.getfnode(node, computemissing=False)
3871 tagsnode = cache.getfnode(node, computemissing=False)
3871 if tagsnode:
3872 if tagsnode:
3872 tagsnodedisplay = hex(tagsnode)
3873 tagsnodedisplay = hex(tagsnode)
3874 if not flog.hasnode(tagsnode):
3875 tagsnodedisplay += b' (unknown node)'
3873 elif tagsnode is None:
3876 elif tagsnode is None:
3874 tagsnodedisplay = b'missing'
3877 tagsnodedisplay = b'missing'
3875 else:
3878 else:
3876 tagsnodedisplay = b'invalid'
3879 tagsnodedisplay = b'invalid'
3877
3880
3878 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3881 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3879
3882
3880
3883
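# Editorial sketch (not part of this changeset): the new check in
# debugtagscache above only trusts a cached filenode if the .hgtags filelog
# actually contains it. A hypothetical helper applying the same validation
# to a single changeset:
def validtagsfnode(repo, node):
    """Return the cached .hgtags filenode for ``node``, or None if it is
    absent from the cache or unknown to the .hgtags filelog."""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    fnode = cache.getfnode(node, computemissing=False)
    if fnode and flog.hasnode(fnode):
        return fnode
    return None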
3881 @command(
3884 @command(
3882 b'debugtemplate',
3885 b'debugtemplate',
3883 [
3886 [
3884 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3887 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3885 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3888 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3886 ],
3889 ],
3887 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3890 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3888 optionalrepo=True,
3891 optionalrepo=True,
3889 )
3892 )
3890 def debugtemplate(ui, repo, tmpl, **opts):
3893 def debugtemplate(ui, repo, tmpl, **opts):
3891 """parse and apply a template
3894 """parse and apply a template
3892
3895
3893 If -r/--rev is given, the template is processed as a log template and
3896 If -r/--rev is given, the template is processed as a log template and
3894 applied to the given changesets. Otherwise, it is processed as a generic
3897 applied to the given changesets. Otherwise, it is processed as a generic
3895 template.
3898 template.
3896
3899
3897 Use --verbose to print the parsed tree.
3900 Use --verbose to print the parsed tree.
3898 """
3901 """
3899 revs = None
3902 revs = None
3900 if opts['rev']:
3903 if opts['rev']:
3901 if repo is None:
3904 if repo is None:
3902 raise error.RepoError(
3905 raise error.RepoError(
3903 _(b'there is no Mercurial repository here (.hg not found)')
3906 _(b'there is no Mercurial repository here (.hg not found)')
3904 )
3907 )
3905 revs = scmutil.revrange(repo, opts['rev'])
3908 revs = scmutil.revrange(repo, opts['rev'])
3906
3909
3907 props = {}
3910 props = {}
3908 for d in opts['define']:
3911 for d in opts['define']:
3909 try:
3912 try:
3910 k, v = (e.strip() for e in d.split(b'=', 1))
3913 k, v = (e.strip() for e in d.split(b'=', 1))
3911 if not k or k == b'ui':
3914 if not k or k == b'ui':
3912 raise ValueError
3915 raise ValueError
3913 props[k] = v
3916 props[k] = v
3914 except ValueError:
3917 except ValueError:
3915 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3918 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3916
3919
3917 if ui.verbose:
3920 if ui.verbose:
3918 aliases = ui.configitems(b'templatealias')
3921 aliases = ui.configitems(b'templatealias')
3919 tree = templater.parse(tmpl)
3922 tree = templater.parse(tmpl)
3920 ui.note(templater.prettyformat(tree), b'\n')
3923 ui.note(templater.prettyformat(tree), b'\n')
3921 newtree = templater.expandaliases(tree, aliases)
3924 newtree = templater.expandaliases(tree, aliases)
3922 if newtree != tree:
3925 if newtree != tree:
3923 ui.notenoi18n(
3926 ui.notenoi18n(
3924 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3927 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3925 )
3928 )
3926
3929
3927 if revs is None:
3930 if revs is None:
3928 tres = formatter.templateresources(ui, repo)
3931 tres = formatter.templateresources(ui, repo)
3929 t = formatter.maketemplater(ui, tmpl, resources=tres)
3932 t = formatter.maketemplater(ui, tmpl, resources=tres)
3930 if ui.verbose:
3933 if ui.verbose:
3931 kwds, funcs = t.symbolsuseddefault()
3934 kwds, funcs = t.symbolsuseddefault()
3932 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3935 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3933 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3936 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3934 ui.write(t.renderdefault(props))
3937 ui.write(t.renderdefault(props))
3935 else:
3938 else:
3936 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3939 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3937 if ui.verbose:
3940 if ui.verbose:
3938 kwds, funcs = displayer.t.symbolsuseddefault()
3941 kwds, funcs = displayer.t.symbolsuseddefault()
3939 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3942 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3940 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3943 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3941 for r in revs:
3944 for r in revs:
3942 displayer.show(repo[r], **pycompat.strkwargs(props))
3945 displayer.show(repo[r], **pycompat.strkwargs(props))
3943 displayer.close()
3946 displayer.close()
3944
3947
3945
3948
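# Editorial sketch (not part of this changeset): the generic-template branch
# of debugtemplate above boils down to the following. ``render_generic`` is a
# hypothetical helper; ``props`` is a plain dict of template properties.
def render_generic(ui, repo, tmpl, props):
    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault(props)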
3946 @command(
3949 @command(
3947 b'debuguigetpass',
3950 b'debuguigetpass',
3948 [
3951 [
3949 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3952 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3950 ],
3953 ],
3951 _(b'[-p TEXT]'),
3954 _(b'[-p TEXT]'),
3952 norepo=True,
3955 norepo=True,
3953 )
3956 )
3954 def debuguigetpass(ui, prompt=b''):
3957 def debuguigetpass(ui, prompt=b''):
3955 """show prompt to type password"""
3958 """show prompt to type password"""
3956 r = ui.getpass(prompt)
3959 r = ui.getpass(prompt)
3957 if r is None:
3960 if r is None:
3958 r = b"<default response>"
3961 r = b"<default response>"
3959 ui.writenoi18n(b'response: %s\n' % r)
3962 ui.writenoi18n(b'response: %s\n' % r)
3960
3963
3961
3964
3962 @command(
3965 @command(
3963 b'debuguiprompt',
3966 b'debuguiprompt',
3964 [
3967 [
3965 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3968 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3966 ],
3969 ],
3967 _(b'[-p TEXT]'),
3970 _(b'[-p TEXT]'),
3968 norepo=True,
3971 norepo=True,
3969 )
3972 )
3970 def debuguiprompt(ui, prompt=b''):
3973 def debuguiprompt(ui, prompt=b''):
3971 """show plain prompt"""
3974 """show plain prompt"""
3972 r = ui.prompt(prompt)
3975 r = ui.prompt(prompt)
3973 ui.writenoi18n(b'response: %s\n' % r)
3976 ui.writenoi18n(b'response: %s\n' % r)
3974
3977
3975
3978
3976 @command(b'debugupdatecaches', [])
3979 @command(b'debugupdatecaches', [])
3977 def debugupdatecaches(ui, repo, *pats, **opts):
3980 def debugupdatecaches(ui, repo, *pats, **opts):
3978 """warm all known caches in the repository"""
3981 """warm all known caches in the repository"""
3979 with repo.wlock(), repo.lock():
3982 with repo.wlock(), repo.lock():
3980 repo.updatecaches(full=True)
3983 repo.updatecaches(full=True)
3981
3984
3982
3985
3983 @command(
3986 @command(
3984 b'debugupgraderepo',
3987 b'debugupgraderepo',
3985 [
3988 [
3986 (
3989 (
3987 b'o',
3990 b'o',
3988 b'optimize',
3991 b'optimize',
3989 [],
3992 [],
3990 _(b'extra optimization to perform'),
3993 _(b'extra optimization to perform'),
3991 _(b'NAME'),
3994 _(b'NAME'),
3992 ),
3995 ),
3993 (b'', b'run', False, _(b'performs an upgrade')),
3996 (b'', b'run', False, _(b'performs an upgrade')),
3994 (b'', b'backup', True, _(b'keep the old repository content around')),
3997 (b'', b'backup', True, _(b'keep the old repository content around')),
3995 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3998 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3996 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3999 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3997 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4000 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
3998 ],
4001 ],
3999 )
4002 )
4000 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4003 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4001 """upgrade a repository to use different features
4004 """upgrade a repository to use different features
4002
4005
4003 If no arguments are specified, the repository is evaluated for upgrade
4006 If no arguments are specified, the repository is evaluated for upgrade
4004 and a list of problems and potential optimizations is printed.
4007 and a list of problems and potential optimizations is printed.
4005
4008
4006 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4009 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4007 can be influenced via additional arguments. More details will be provided
4010 can be influenced via additional arguments. More details will be provided
4008 by the command output when run without ``--run``.
4011 by the command output when run without ``--run``.
4009
4012
4010 During the upgrade, the repository will be locked and no writes will be
4013 During the upgrade, the repository will be locked and no writes will be
4011 allowed.
4014 allowed.
4012
4015
4013 At the end of the upgrade, the repository may not be readable while new
4016 At the end of the upgrade, the repository may not be readable while new
4014 repository data is swapped in. This window will be as long as it takes to
4017 repository data is swapped in. This window will be as long as it takes to
4015 rename some directories inside the ``.hg`` directory. On most machines, this
4018 rename some directories inside the ``.hg`` directory. On most machines, this
4016 should complete almost instantaneously and the chances of a consumer being
4019 should complete almost instantaneously and the chances of a consumer being
4017 unable to access the repository should be low.
4020 unable to access the repository should be low.
4018
4021
4019 By default, all revlogs will be upgraded. You can restrict this using flags
4022 By default, all revlogs will be upgraded. You can restrict this using flags
4020 such as `--manifest`:
4023 such as `--manifest`:
4021
4024
4022 * `--manifest`: only optimize the manifest
4025 * `--manifest`: only optimize the manifest
4023 * `--no-manifest`: optimize all revlogs but the manifest
4026 * `--no-manifest`: optimize all revlogs but the manifest
4024 * `--changelog`: optimize the changelog only
4027 * `--changelog`: optimize the changelog only
4025 * `--no-changelog --no-manifest`: optimize filelogs only
4028 * `--no-changelog --no-manifest`: optimize filelogs only
4026 * `--filelogs`: optimize the filelogs only
4029 * `--filelogs`: optimize the filelogs only
4027 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4030 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4028 """
4031 """
4029 return upgrade.upgraderepo(
4032 return upgrade.upgraderepo(
4030 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4033 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4031 )
4034 )
4032
4035
4033
4036
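# Editorial sketch (not part of this changeset): the revlog-selection flags
# are forwarded unchanged to upgrade.upgraderepo(), so a hypothetical caller
# wanting the `--no-changelog --no-manifest` behaviour (filelogs only, per
# the help text above) could pass the equivalent keyword arguments directly.
def upgrade_filelogs_only(ui, repo):
    return upgrade.upgraderepo(
        ui,
        repo,
        run=True,
        optimize=set(),
        backup=True,
        changelog=False,  # --no-changelog
        manifest=False,  # --no-manifest
    )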
4034 @command(
4037 @command(
4035 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4038 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4036 )
4039 )
4037 def debugwalk(ui, repo, *pats, **opts):
4040 def debugwalk(ui, repo, *pats, **opts):
4038 """show how files match on given patterns"""
4041 """show how files match on given patterns"""
4039 opts = pycompat.byteskwargs(opts)
4042 opts = pycompat.byteskwargs(opts)
4040 m = scmutil.match(repo[None], pats, opts)
4043 m = scmutil.match(repo[None], pats, opts)
4041 if ui.verbose:
4044 if ui.verbose:
4042 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4045 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4043 items = list(repo[None].walk(m))
4046 items = list(repo[None].walk(m))
4044 if not items:
4047 if not items:
4045 return
4048 return
4046 f = lambda fn: fn
4049 f = lambda fn: fn
4047 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4050 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4048 f = lambda fn: util.normpath(fn)
4051 f = lambda fn: util.normpath(fn)
4049 fmt = b'f %%-%ds %%-%ds %%s' % (
4052 fmt = b'f %%-%ds %%-%ds %%s' % (
4050 max([len(abs) for abs in items]),
4053 max([len(abs) for abs in items]),
4051 max([len(repo.pathto(abs)) for abs in items]),
4054 max([len(repo.pathto(abs)) for abs in items]),
4052 )
4055 )
4053 for abs in items:
4056 for abs in items:
4054 line = fmt % (
4057 line = fmt % (
4055 abs,
4058 abs,
4056 f(repo.pathto(abs)),
4059 f(repo.pathto(abs)),
4057 m.exact(abs) and b'exact' or b'',
4060 m.exact(abs) and b'exact' or b'',
4058 )
4061 )
4059 ui.write(b"%s\n" % line.rstrip())
4062 ui.write(b"%s\n" % line.rstrip())
4060
4063
4061
4064
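# Editorial sketch (not part of this changeset): the core of debugwalk above
# is matching patterns against the working context. ``matching_files`` is a
# hypothetical helper returning (path, is-exact-match) pairs.
def matching_files(repo, pats):
    wctx = repo[None]
    m = scmutil.match(wctx, pats, {})
    return [(f, m.exact(f)) for f in wctx.walk(m)]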
4062 @command(b'debugwhyunstable', [], _(b'REV'))
4065 @command(b'debugwhyunstable', [], _(b'REV'))
4063 def debugwhyunstable(ui, repo, rev):
4066 def debugwhyunstable(ui, repo, rev):
4064 """explain instabilities of a changeset"""
4067 """explain instabilities of a changeset"""
4065 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4068 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4066 dnodes = b''
4069 dnodes = b''
4067 if entry.get(b'divergentnodes'):
4070 if entry.get(b'divergentnodes'):
4068 dnodes = (
4071 dnodes = (
4069 b' '.join(
4072 b' '.join(
4070 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4073 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4071 for ctx in entry[b'divergentnodes']
4074 for ctx in entry[b'divergentnodes']
4072 )
4075 )
4073 + b' '
4076 + b' '
4074 )
4077 )
4075 ui.write(
4078 ui.write(
4076 b'%s: %s%s %s\n'
4079 b'%s: %s%s %s\n'
4077 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4080 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4078 )
4081 )
4079
4082
4080
4083
4081 @command(
4084 @command(
4082 b'debugwireargs',
4085 b'debugwireargs',
4083 [
4086 [
4084 (b'', b'three', b'', b'three'),
4087 (b'', b'three', b'', b'three'),
4085 (b'', b'four', b'', b'four'),
4088 (b'', b'four', b'', b'four'),
4086 (b'', b'five', b'', b'five'),
4089 (b'', b'five', b'', b'five'),
4087 ]
4090 ]
4088 + cmdutil.remoteopts,
4091 + cmdutil.remoteopts,
4089 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4092 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4090 norepo=True,
4093 norepo=True,
4091 )
4094 )
4092 def debugwireargs(ui, repopath, *vals, **opts):
4095 def debugwireargs(ui, repopath, *vals, **opts):
4093 opts = pycompat.byteskwargs(opts)
4096 opts = pycompat.byteskwargs(opts)
4094 repo = hg.peer(ui, opts, repopath)
4097 repo = hg.peer(ui, opts, repopath)
4095 for opt in cmdutil.remoteopts:
4098 for opt in cmdutil.remoteopts:
4096 del opts[opt[1]]
4099 del opts[opt[1]]
4097 args = {}
4100 args = {}
4098 for k, v in pycompat.iteritems(opts):
4101 for k, v in pycompat.iteritems(opts):
4099 if v:
4102 if v:
4100 args[k] = v
4103 args[k] = v
4101 args = pycompat.strkwargs(args)
4104 args = pycompat.strkwargs(args)
4102 # run twice to check that we don't mess up the stream for the next command
4105 # run twice to check that we don't mess up the stream for the next command
4103 res1 = repo.debugwireargs(*vals, **args)
4106 res1 = repo.debugwireargs(*vals, **args)
4104 res2 = repo.debugwireargs(*vals, **args)
4107 res2 = repo.debugwireargs(*vals, **args)
4105 ui.write(b"%s\n" % res1)
4108 ui.write(b"%s\n" % res1)
4106 if res1 != res2:
4109 if res1 != res2:
4107 ui.warn(b"%s\n" % res2)
4110 ui.warn(b"%s\n" % res2)
4108
4111
4109
4112
4110 def _parsewirelangblocks(fh):
4113 def _parsewirelangblocks(fh):
4111 activeaction = None
4114 activeaction = None
4112 blocklines = []
4115 blocklines = []
4113 lastindent = 0
4116 lastindent = 0
4114
4117
4115 for line in fh:
4118 for line in fh:
4116 line = line.rstrip()
4119 line = line.rstrip()
4117 if not line:
4120 if not line:
4118 continue
4121 continue
4119
4122
4120 if line.startswith(b'#'):
4123 if line.startswith(b'#'):
4121 continue
4124 continue
4122
4125
4123 if not line.startswith(b' '):
4126 if not line.startswith(b' '):
4124 # New block. Flush previous one.
4127 # New block. Flush previous one.
4125 if activeaction:
4128 if activeaction:
4126 yield activeaction, blocklines
4129 yield activeaction, blocklines
4127
4130
4128 activeaction = line
4131 activeaction = line
4129 blocklines = []
4132 blocklines = []
4130 lastindent = 0
4133 lastindent = 0
4131 continue
4134 continue
4132
4135
4133 # Else we start with an indent.
4136 # Else we start with an indent.
4134
4137
4135 if not activeaction:
4138 if not activeaction:
4136 raise error.Abort(_(b'indented line outside of block'))
4139 raise error.Abort(_(b'indented line outside of block'))
4137
4140
4138 indent = len(line) - len(line.lstrip())
4141 indent = len(line) - len(line.lstrip())
4139
4142
4140 # If this line is indented more than the last line, concatenate it.
4143 # If this line is indented more than the last line, concatenate it.
4141 if indent > lastindent and blocklines:
4144 if indent > lastindent and blocklines:
4142 blocklines[-1] += line.lstrip()
4145 blocklines[-1] += line.lstrip()
4143 else:
4146 else:
4144 blocklines.append(line)
4147 blocklines.append(line)
4145 lastindent = indent
4148 lastindent = indent
4146
4149
4147 # Flush last block.
4150 # Flush last block.
4148 if activeaction:
4151 if activeaction:
4149 yield activeaction, blocklines
4152 yield activeaction, blocklines
4150
4153
4151
4154
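# Editorial sketch (not part of this changeset): feeding a small script in
# the mini language documented below (see debugwireproto) through the parser
# above. The sample bytes are illustrative; any file-like object yielding
# byte lines works.
def _parsewirelang_example():
    import io

    sample = io.BytesIO(
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'readavailable\n'
    )
    # yields (b'command listkeys', [b'    namespace bookmarks']),
    # then (b'readavailable', [])
    return list(_parsewirelangblocks(sample))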
4152 @command(
4155 @command(
4153 b'debugwireproto',
4156 b'debugwireproto',
4154 [
4157 [
4155 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4158 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4156 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4159 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4157 (
4160 (
4158 b'',
4161 b'',
4159 b'noreadstderr',
4162 b'noreadstderr',
4160 False,
4163 False,
4161 _(b'do not read from stderr of the remote'),
4164 _(b'do not read from stderr of the remote'),
4162 ),
4165 ),
4163 (
4166 (
4164 b'',
4167 b'',
4165 b'nologhandshake',
4168 b'nologhandshake',
4166 False,
4169 False,
4167 _(b'do not log I/O related to the peer handshake'),
4170 _(b'do not log I/O related to the peer handshake'),
4168 ),
4171 ),
4169 ]
4172 ]
4170 + cmdutil.remoteopts,
4173 + cmdutil.remoteopts,
4171 _(b'[PATH]'),
4174 _(b'[PATH]'),
4172 optionalrepo=True,
4175 optionalrepo=True,
4173 )
4176 )
4174 def debugwireproto(ui, repo, path=None, **opts):
4177 def debugwireproto(ui, repo, path=None, **opts):
4175 """send wire protocol commands to a server
4178 """send wire protocol commands to a server
4176
4179
4177 This command can be used to issue wire protocol commands to remote
4180 This command can be used to issue wire protocol commands to remote
4178 peers and to debug the raw data being exchanged.
4181 peers and to debug the raw data being exchanged.
4179
4182
4180 ``--localssh`` will start an SSH server against the current repository
4183 ``--localssh`` will start an SSH server against the current repository
4181 and connect to that. By default, the connection will perform a handshake
4184 and connect to that. By default, the connection will perform a handshake
4182 and establish an appropriate peer instance.
4185 and establish an appropriate peer instance.
4183
4186
4184 ``--peer`` can be used to bypass the handshake protocol and construct a
4187 ``--peer`` can be used to bypass the handshake protocol and construct a
4185 peer instance using the specified class type. Valid values are ``raw``,
4188 peer instance using the specified class type. Valid values are ``raw``,
4186 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4189 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4187 raw data payloads and don't support higher-level command actions.
4190 raw data payloads and don't support higher-level command actions.
4188
4191
4189 ``--noreadstderr`` can be used to disable automatic reading from stderr
4192 ``--noreadstderr`` can be used to disable automatic reading from stderr
4190 of the peer (for SSH connections only). Disabling automatic reading of
4193 of the peer (for SSH connections only). Disabling automatic reading of
4191 stderr is useful for making output more deterministic.
4194 stderr is useful for making output more deterministic.
4192
4195
4193 Commands are issued via a mini language which is specified via stdin.
4196 Commands are issued via a mini language which is specified via stdin.
4194 The language consists of individual actions to perform. An action is
4197 The language consists of individual actions to perform. An action is
4195 defined by a block. A block is defined as a line with no leading
4198 defined by a block. A block is defined as a line with no leading
4196 space followed by 0 or more lines with leading space. Blocks are
4199 space followed by 0 or more lines with leading space. Blocks are
4197 effectively a high-level command with additional metadata.
4200 effectively a high-level command with additional metadata.
4198
4201
4199 Lines beginning with ``#`` are ignored.
4202 Lines beginning with ``#`` are ignored.
4200
4203
4201 The following sections denote available actions.
4204 The following sections denote available actions.
4202
4205
4203 raw
4206 raw
4204 ---
4207 ---
4205
4208
4206 Send raw data to the server.
4209 Send raw data to the server.
4207
4210
4208 The block payload contains the raw data to send as one atomic send
4211 The block payload contains the raw data to send as one atomic send
4209 operation. The data may not actually be delivered in a single system
4212 operation. The data may not actually be delivered in a single system
4210 call: it depends on the abilities of the transport being used.
4213 call: it depends on the abilities of the transport being used.
4211
4214
4212 Each line in the block is de-indented and concatenated. Then, that
4215 Each line in the block is de-indented and concatenated. Then, that
4213 value is evaluated as a Python b'' literal. This allows the use of
4216 value is evaluated as a Python b'' literal. This allows the use of
4214 backslash escaping, etc.
4217 backslash escaping, etc.
4215
4218
4216 raw+
4219 raw+
4217 ----
4220 ----
4218
4221
4219 Behaves like ``raw`` except flushes output afterwards.
4222 Behaves like ``raw`` except flushes output afterwards.
4220
4223
4221 command <X>
4224 command <X>
4222 -----------
4225 -----------
4223
4226
4224 Send a request to run a named command, whose name follows the ``command``
4227 Send a request to run a named command, whose name follows the ``command``
4225 string.
4228 string.
4226
4229
4227 Arguments to the command are defined as lines in this block. The format of
4230 Arguments to the command are defined as lines in this block. The format of
4228 each line is ``<key> <value>``. e.g.::
4231 each line is ``<key> <value>``. e.g.::
4229
4232
4230 command listkeys
4233 command listkeys
4231 namespace bookmarks
4234 namespace bookmarks
4232
4235
4233 If the value begins with ``eval:``, it will be interpreted as a Python
4236 If the value begins with ``eval:``, it will be interpreted as a Python
4234 literal expression. Otherwise values are interpreted as Python b'' literals.
4237 literal expression. Otherwise values are interpreted as Python b'' literals.
4235 This allows sending complex types and encoding special byte sequences via
4238 This allows sending complex types and encoding special byte sequences via
4236 backslash escaping.
4239 backslash escaping.
4237
4240
4238 The following arguments have special meaning:
4241 The following arguments have special meaning:
4239
4242
4240 ``PUSHFILE``
4243 ``PUSHFILE``
4241 When defined, the *push* mechanism of the peer will be used instead
4244 When defined, the *push* mechanism of the peer will be used instead
4242 of the static request-response mechanism and the content of the
4245 of the static request-response mechanism and the content of the
4243 file specified in the value of this argument will be sent as the
4246 file specified in the value of this argument will be sent as the
4244 command payload.
4247 command payload.
4245
4248
4246 This can be used to submit a local bundle file to the remote.
4249 This can be used to submit a local bundle file to the remote.
4247
4250
4248 batchbegin
4251 batchbegin
4249 ----------
4252 ----------
4250
4253
4251 Instruct the peer to begin a batched send.
4254 Instruct the peer to begin a batched send.
4252
4255
4253 All ``command`` blocks are queued for execution until the next
4256 All ``command`` blocks are queued for execution until the next
4254 ``batchsubmit`` block.
4257 ``batchsubmit`` block.
4255
4258
4256 batchsubmit
4259 batchsubmit
4257 -----------
4260 -----------
4258
4261
4259 Submit previously queued ``command`` blocks as a batch request.
4262 Submit previously queued ``command`` blocks as a batch request.
4260
4263
4261 This action MUST be paired with a ``batchbegin`` action.
4264 This action MUST be paired with a ``batchbegin`` action.
4262
4265
4263 httprequest <method> <path>
4266 httprequest <method> <path>
4264 ---------------------------
4267 ---------------------------
4265
4268
4266 (HTTP peer only)
4269 (HTTP peer only)
4267
4270
4268 Send an HTTP request to the peer.
4271 Send an HTTP request to the peer.
4269
4272
4270 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4273 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4271
4274
4272 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4275 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4273 headers to add to the request. e.g. ``Accept: foo``.
4276 headers to add to the request. e.g. ``Accept: foo``.
4274
4277
4275 The following arguments are special:
4278 The following arguments are special:
4276
4279
4277 ``BODYFILE``
4280 ``BODYFILE``
4278 The content of the file defined as the value to this argument will be
4281 The content of the file defined as the value to this argument will be
4279 transferred verbatim as the HTTP request body.
4282 transferred verbatim as the HTTP request body.
4280
4283
4281 ``frame <type> <flags> <payload>``
4284 ``frame <type> <flags> <payload>``
4282 Send a unified protocol frame as part of the request body.
4285 Send a unified protocol frame as part of the request body.
4283
4286
4284 All frames will be collected and sent as the body to the HTTP
4287 All frames will be collected and sent as the body to the HTTP
4285 request.
4288 request.
4286
4289
4287 close
4290 close
4288 -----
4291 -----
4289
4292
4290 Close the connection to the server.
4293 Close the connection to the server.
4291
4294
4292 flush
4295 flush
4293 -----
4296 -----
4294
4297
4295 Flush data written to the server.
4298 Flush data written to the server.
4296
4299
4297 readavailable
4300 readavailable
4298 -------------
4301 -------------
4299
4302
4300 Close the write end of the connection and read all available data from
4303 Close the write end of the connection and read all available data from
4301 the server.
4304 the server.
4302
4305
4303 If the connection to the server encompasses multiple pipes, we poll both
4306 If the connection to the server encompasses multiple pipes, we poll both
4304 pipes and read available data.
4307 pipes and read available data.
4305
4308
4306 readline
4309 readline
4307 --------
4310 --------
4308
4311
4309 Read a line of output from the server. If there are multiple output
4312 Read a line of output from the server. If there are multiple output
4310 pipes, reads only the main pipe.
4313 pipes, reads only the main pipe.
4311
4314
4312 ereadline
4315 ereadline
4313 ---------
4316 ---------
4314
4317
4315 Like ``readline``, but read from the stderr pipe, if available.
4318 Like ``readline``, but read from the stderr pipe, if available.
4316
4319
4317 read <X>
4320 read <X>
4318 --------
4321 --------
4319
4322
4320 ``read()`` N bytes from the server's main output pipe.
4323 ``read()`` N bytes from the server's main output pipe.
4321
4324
4322 eread <X>
4325 eread <X>
4323 ---------
4326 ---------
4324
4327
4325 ``read()`` N bytes from the server's stderr pipe, if available.
4328 ``read()`` N bytes from the server's stderr pipe, if available.
4326
4329
4327 Specifying Unified Frame-Based Protocol Frames
4330 Specifying Unified Frame-Based Protocol Frames
4328 ----------------------------------------------
4331 ----------------------------------------------
4329
4332
4330 It is possible to emit *Unified Frame-Based Protocol* frames by using special
4333 It is possible to emit *Unified Frame-Based Protocol* frames by using special
4331 syntax.
4334 syntax.
4332
4335
4333 A frame is composed as a type, flags, and payload. These can be parsed
4336 A frame is composed as a type, flags, and payload. These can be parsed
4334 from a string of the form:
4337 from a string of the form:
4335
4338
4336 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4339 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4337
4340
4338 ``request-id`` and ``stream-id`` are integers defining the request and
4341 ``request-id`` and ``stream-id`` are integers defining the request and
4339 stream identifiers.
4342 stream identifiers.
4340
4343
4341 ``type`` can be an integer value for the frame type or the string name
4344 ``type`` can be an integer value for the frame type or the string name
4342 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4345 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4343 ``command-name``.
4346 ``command-name``.
4344
4347
4345 ``stream-flags`` and ``flags`` are ``|``-delimited lists of flag
4348 ``stream-flags`` and ``flags`` are ``|``-delimited lists of flag
4346 components. Each component (and there can be just one) can be an integer
4349 components. Each component (and there can be just one) can be an integer
4347 or a flag name for stream flags or frame flags, respectively. Values are
4350 or a flag name for stream flags or frame flags, respectively. Values are
4348 resolved to integers and then bitwise OR'd together.
4351 resolved to integers and then bitwise OR'd together.
4349
4352
4350 ``payload`` represents the raw frame payload. If it begins with
4353 ``payload`` represents the raw frame payload. If it begins with
4351 ``cbor:``, the following string is evaluated as Python code and the
4354 ``cbor:``, the following string is evaluated as Python code and the
4352 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4355 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4353 as a Python byte string literal.
4356 as a Python byte string literal.
4354 """
4357 """
4355 opts = pycompat.byteskwargs(opts)
4358 opts = pycompat.byteskwargs(opts)
4356
4359
4357 if opts[b'localssh'] and not repo:
4360 if opts[b'localssh'] and not repo:
4358 raise error.Abort(_(b'--localssh requires a repository'))
4361 raise error.Abort(_(b'--localssh requires a repository'))
4359
4362
4360 if opts[b'peer'] and opts[b'peer'] not in (
4363 if opts[b'peer'] and opts[b'peer'] not in (
4361 b'raw',
4364 b'raw',
4362 b'http2',
4365 b'http2',
4363 b'ssh1',
4366 b'ssh1',
4364 b'ssh2',
4367 b'ssh2',
4365 ):
4368 ):
4366 raise error.Abort(
4369 raise error.Abort(
4367 _(b'invalid value for --peer'),
4370 _(b'invalid value for --peer'),
4368 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4371 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4369 )
4372 )
4370
4373
4371 if path and opts[b'localssh']:
4374 if path and opts[b'localssh']:
4372 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4375 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4373
4376
4374 if ui.interactive():
4377 if ui.interactive():
4375 ui.write(_(b'(waiting for commands on stdin)\n'))
4378 ui.write(_(b'(waiting for commands on stdin)\n'))
4376
4379
4377 blocks = list(_parsewirelangblocks(ui.fin))
4380 blocks = list(_parsewirelangblocks(ui.fin))
4378
4381
4379 proc = None
4382 proc = None
4380 stdin = None
4383 stdin = None
4381 stdout = None
4384 stdout = None
4382 stderr = None
4385 stderr = None
4383 opener = None
4386 opener = None
4384
4387
4385 if opts[b'localssh']:
4388 if opts[b'localssh']:
4386 # We start the SSH server in its own process so there is process
4389 # We start the SSH server in its own process so there is process
4387 # separation. This prevents a whole class of potential bugs around
4390 # separation. This prevents a whole class of potential bugs around
4388 # shared state from interfering with server operation.
4391 # shared state from interfering with server operation.
4389 args = procutil.hgcmd() + [
4392 args = procutil.hgcmd() + [
4390 b'-R',
4393 b'-R',
4391 repo.root,
4394 repo.root,
4392 b'debugserve',
4395 b'debugserve',
4393 b'--sshstdio',
4396 b'--sshstdio',
4394 ]
4397 ]
4395 proc = subprocess.Popen(
4398 proc = subprocess.Popen(
4396 pycompat.rapply(procutil.tonativestr, args),
4399 pycompat.rapply(procutil.tonativestr, args),
4397 stdin=subprocess.PIPE,
4400 stdin=subprocess.PIPE,
4398 stdout=subprocess.PIPE,
4401 stdout=subprocess.PIPE,
4399 stderr=subprocess.PIPE,
4402 stderr=subprocess.PIPE,
4400 bufsize=0,
4403 bufsize=0,
4401 )
4404 )
4402
4405
4403 stdin = proc.stdin
4406 stdin = proc.stdin
4404 stdout = proc.stdout
4407 stdout = proc.stdout
4405 stderr = proc.stderr
4408 stderr = proc.stderr
4406
4409
4407 # We turn the pipes into observers so we can log I/O.
4410 # We turn the pipes into observers so we can log I/O.
4408 if ui.verbose or opts[b'peer'] == b'raw':
4411 if ui.verbose or opts[b'peer'] == b'raw':
4409 stdin = util.makeloggingfileobject(
4412 stdin = util.makeloggingfileobject(
4410 ui, proc.stdin, b'i', logdata=True
4413 ui, proc.stdin, b'i', logdata=True
4411 )
4414 )
4412 stdout = util.makeloggingfileobject(
4415 stdout = util.makeloggingfileobject(
4413 ui, proc.stdout, b'o', logdata=True
4416 ui, proc.stdout, b'o', logdata=True
4414 )
4417 )
4415 stderr = util.makeloggingfileobject(
4418 stderr = util.makeloggingfileobject(
4416 ui, proc.stderr, b'e', logdata=True
4419 ui, proc.stderr, b'e', logdata=True
4417 )
4420 )
4418
4421
4419 # --localssh also implies the peer connection settings.
4422 # --localssh also implies the peer connection settings.
4420
4423
4421 url = b'ssh://localserver'
4424 url = b'ssh://localserver'
4422 autoreadstderr = not opts[b'noreadstderr']
4425 autoreadstderr = not opts[b'noreadstderr']
4423
4426
4424 if opts[b'peer'] == b'ssh1':
4427 if opts[b'peer'] == b'ssh1':
4425 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4428 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4426 peer = sshpeer.sshv1peer(
4429 peer = sshpeer.sshv1peer(
4427 ui,
4430 ui,
4428 url,
4431 url,
4429 proc,
4432 proc,
4430 stdin,
4433 stdin,
4431 stdout,
4434 stdout,
4432 stderr,
4435 stderr,
4433 None,
4436 None,
4434 autoreadstderr=autoreadstderr,
4437 autoreadstderr=autoreadstderr,
4435 )
4438 )
4436 elif opts[b'peer'] == b'ssh2':
4439 elif opts[b'peer'] == b'ssh2':
4437 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4440 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4438 peer = sshpeer.sshv2peer(
4441 peer = sshpeer.sshv2peer(
4439 ui,
4442 ui,
4440 url,
4443 url,
4441 proc,
4444 proc,
4442 stdin,
4445 stdin,
4443 stdout,
4446 stdout,
4444 stderr,
4447 stderr,
4445 None,
4448 None,
4446 autoreadstderr=autoreadstderr,
4449 autoreadstderr=autoreadstderr,
4447 )
4450 )
4448 elif opts[b'peer'] == b'raw':
4451 elif opts[b'peer'] == b'raw':
4449 ui.write(_(b'using raw connection to peer\n'))
4452 ui.write(_(b'using raw connection to peer\n'))
4450 peer = None
4453 peer = None
4451 else:
4454 else:
4452 ui.write(_(b'creating ssh peer from handshake results\n'))
4455 ui.write(_(b'creating ssh peer from handshake results\n'))
4453 peer = sshpeer.makepeer(
4456 peer = sshpeer.makepeer(
4454 ui,
4457 ui,
4455 url,
4458 url,
4456 proc,
4459 proc,
4457 stdin,
4460 stdin,
4458 stdout,
4461 stdout,
4459 stderr,
4462 stderr,
4460 autoreadstderr=autoreadstderr,
4463 autoreadstderr=autoreadstderr,
4461 )
4464 )
4462
4465
4463 elif path:
4466 elif path:
4464 # We bypass hg.peer() so we can proxy the sockets.
4467 # We bypass hg.peer() so we can proxy the sockets.
4465 # TODO consider not doing this because we skip
4468 # TODO consider not doing this because we skip
4466 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4469 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4467 u = util.url(path)
4470 u = util.url(path)
4468 if u.scheme != b'http':
4471 if u.scheme != b'http':
4469 raise error.Abort(_(b'only http:// paths are currently supported'))
4472 raise error.Abort(_(b'only http:// paths are currently supported'))
4470
4473
4471 url, authinfo = u.authinfo()
4474 url, authinfo = u.authinfo()
4472 openerargs = {
4475 openerargs = {
4473 'useragent': b'Mercurial debugwireproto',
4476 'useragent': b'Mercurial debugwireproto',
4474 }
4477 }
4475
4478
4476 # Turn pipes/sockets into observers so we can log I/O.
4479 # Turn pipes/sockets into observers so we can log I/O.
4477 if ui.verbose:
4480 if ui.verbose:
4478 openerargs.update(
4481 openerargs.update(
4479 {
4482 {
4480 'loggingfh': ui,
4483 'loggingfh': ui,
4481 'loggingname': b's',
4484 'loggingname': b's',
4482 'loggingopts': {
4485 'loggingopts': {
4483 'logdata': True,
4486 'logdata': True,
4484 'logdataapis': False,
4487 'logdataapis': False,
4485 },
4488 },
4486 }
4489 }
4487 )
4490 )
4488
4491
4489 if ui.debugflag:
4492 if ui.debugflag:
4490 openerargs['loggingopts']['logdataapis'] = True
4493 openerargs['loggingopts']['logdataapis'] = True
4491
4494
4492 # Don't send default headers when in raw mode. This allows us to
4495 # Don't send default headers when in raw mode. This allows us to
4493 # bypass most of the behavior of our URL handling code so we can
4496 # bypass most of the behavior of our URL handling code so we can
4494 # have near complete control over what's sent on the wire.
4497 # have near complete control over what's sent on the wire.
4495 if opts[b'peer'] == b'raw':
4498 if opts[b'peer'] == b'raw':
4496 openerargs['sendaccept'] = False
4499 openerargs['sendaccept'] = False
4497
4500
4498 opener = urlmod.opener(ui, authinfo, **openerargs)
4501 opener = urlmod.opener(ui, authinfo, **openerargs)
4499
4502
4500 if opts[b'peer'] == b'http2':
4503 if opts[b'peer'] == b'http2':
4501 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4504 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4502 # We go through makepeer() because we need an API descriptor for
4505 # We go through makepeer() because we need an API descriptor for
4503 # the peer instance to be useful.
4506 # the peer instance to be useful.
4504 with ui.configoverride(
4507 with ui.configoverride(
4505 {(b'experimental', b'httppeer.advertise-v2'): True}
4508 {(b'experimental', b'httppeer.advertise-v2'): True}
4506 ):
4509 ):
4507 if opts[b'nologhandshake']:
4510 if opts[b'nologhandshake']:
4508 ui.pushbuffer()
4511 ui.pushbuffer()
4509
4512
4510 peer = httppeer.makepeer(ui, path, opener=opener)
4513 peer = httppeer.makepeer(ui, path, opener=opener)
4511
4514
4512 if opts[b'nologhandshake']:
4515 if opts[b'nologhandshake']:
4513 ui.popbuffer()
4516 ui.popbuffer()
4514
4517
4515 if not isinstance(peer, httppeer.httpv2peer):
4518 if not isinstance(peer, httppeer.httpv2peer):
4516 raise error.Abort(
4519 raise error.Abort(
4517 _(
4520 _(
4518 b'could not instantiate HTTP peer for '
4521 b'could not instantiate HTTP peer for '
4519 b'wire protocol version 2'
4522 b'wire protocol version 2'
4520 ),
4523 ),
4521 hint=_(
4524 hint=_(
4522 b'the server may not have the feature '
4525 b'the server may not have the feature '
4523 b'enabled or is not allowing this '
4526 b'enabled or is not allowing this '
4524 b'client version'
4527 b'client version'
4525 ),
4528 ),
4526 )
4529 )
4527
4530
4528 elif opts[b'peer'] == b'raw':
4531 elif opts[b'peer'] == b'raw':
4529 ui.write(_(b'using raw connection to peer\n'))
4532 ui.write(_(b'using raw connection to peer\n'))
4530 peer = None
4533 peer = None
4531 elif opts[b'peer']:
4534 elif opts[b'peer']:
4532 raise error.Abort(
4535 raise error.Abort(
4533 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4536 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4534 )
4537 )
4535 else:
4538 else:
4536 peer = httppeer.makepeer(ui, path, opener=opener)
4539 peer = httppeer.makepeer(ui, path, opener=opener)
4537
4540
4538 # We /could/ populate stdin/stdout with sock.makefile()...
4541 # We /could/ populate stdin/stdout with sock.makefile()...
4539 else:
4542 else:
4540 raise error.Abort(_(b'unsupported connection configuration'))
4543 raise error.Abort(_(b'unsupported connection configuration'))
4541
4544
4542 batchedcommands = None
4545 batchedcommands = None
4543
4546
4544 # Now perform actions based on the parsed wire language instructions.
4547 # Now perform actions based on the parsed wire language instructions.
4545 for action, lines in blocks:
4548 for action, lines in blocks:
4546 if action in (b'raw', b'raw+'):
4549 if action in (b'raw', b'raw+'):
4547 if not stdin:
4550 if not stdin:
4548 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4551 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4549
4552
4550 # Concatenate the data together.
4553 # Concatenate the data together.
4551 data = b''.join(l.lstrip() for l in lines)
4554 data = b''.join(l.lstrip() for l in lines)
4552 data = stringutil.unescapestr(data)
4555 data = stringutil.unescapestr(data)
4553 stdin.write(data)
4556 stdin.write(data)
4554
4557
4555 if action == b'raw+':
4558 if action == b'raw+':
4556 stdin.flush()
4559 stdin.flush()
4557 elif action == b'flush':
4560 elif action == b'flush':
4558 if not stdin:
4561 if not stdin:
4559 raise error.Abort(_(b'cannot call flush on this peer'))
4562 raise error.Abort(_(b'cannot call flush on this peer'))
4560 stdin.flush()
4563 stdin.flush()
4561 elif action.startswith(b'command'):
4564 elif action.startswith(b'command'):
4562 if not peer:
4565 if not peer:
4563 raise error.Abort(
4566 raise error.Abort(
4564 _(
4567 _(
4565 b'cannot send commands unless peer instance '
4568 b'cannot send commands unless peer instance '
4566 b'is available'
4569 b'is available'
4567 )
4570 )
4568 )
4571 )
4569
4572
4570 command = action.split(b' ', 1)[1]
4573 command = action.split(b' ', 1)[1]
4571
4574
4572 args = {}
4575 args = {}
4573 for line in lines:
4576 for line in lines:
4574 # We need to allow empty values.
4577 # We need to allow empty values.
4575 fields = line.lstrip().split(b' ', 1)
4578 fields = line.lstrip().split(b' ', 1)
4576 if len(fields) == 1:
4579 if len(fields) == 1:
4577 key = fields[0]
4580 key = fields[0]
4578 value = b''
4581 value = b''
4579 else:
4582 else:
4580 key, value = fields
4583 key, value = fields
4581
4584
4582 if value.startswith(b'eval:'):
4585 if value.startswith(b'eval:'):
4583 value = stringutil.evalpythonliteral(value[5:])
4586 value = stringutil.evalpythonliteral(value[5:])
4584 else:
4587 else:
4585 value = stringutil.unescapestr(value)
4588 value = stringutil.unescapestr(value)
4586
4589
4587 args[key] = value
4590 args[key] = value
4588
4591
4589 if batchedcommands is not None:
4592 if batchedcommands is not None:
4590 batchedcommands.append((command, args))
4593 batchedcommands.append((command, args))
4591 continue
4594 continue
4592
4595
4593 ui.status(_(b'sending %s command\n') % command)
4596 ui.status(_(b'sending %s command\n') % command)
4594
4597
4595 if b'PUSHFILE' in args:
4598 if b'PUSHFILE' in args:
4596 with open(args[b'PUSHFILE'], 'rb') as fh:
4599 with open(args[b'PUSHFILE'], 'rb') as fh:
4597 del args[b'PUSHFILE']
4600 del args[b'PUSHFILE']
4598 res, output = peer._callpush(
4601 res, output = peer._callpush(
4599 command, fh, **pycompat.strkwargs(args)
4602 command, fh, **pycompat.strkwargs(args)
4600 )
4603 )
4601 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4604 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4602 ui.status(
4605 ui.status(
4603 _(b'remote output: %s\n') % stringutil.escapestr(output)
4606 _(b'remote output: %s\n') % stringutil.escapestr(output)
4604 )
4607 )
4605 else:
4608 else:
4606 with peer.commandexecutor() as e:
4609 with peer.commandexecutor() as e:
4607 res = e.callcommand(command, args).result()
4610 res = e.callcommand(command, args).result()
4608
4611
4609 if isinstance(res, wireprotov2peer.commandresponse):
4612 if isinstance(res, wireprotov2peer.commandresponse):
4610 val = res.objects()
4613 val = res.objects()
4611 ui.status(
4614 ui.status(
4612 _(b'response: %s\n')
4615 _(b'response: %s\n')
4613 % stringutil.pprint(val, bprefix=True, indent=2)
4616 % stringutil.pprint(val, bprefix=True, indent=2)
4614 )
4617 )
4615 else:
4618 else:
4616 ui.status(
4619 ui.status(
4617 _(b'response: %s\n')
4620 _(b'response: %s\n')
4618 % stringutil.pprint(res, bprefix=True, indent=2)
4621 % stringutil.pprint(res, bprefix=True, indent=2)
4619 )
4622 )
4620
4623
4621 elif action == b'batchbegin':
4624 elif action == b'batchbegin':
4622 if batchedcommands is not None:
4625 if batchedcommands is not None:
4623 raise error.Abort(_(b'nested batchbegin not allowed'))
4626 raise error.Abort(_(b'nested batchbegin not allowed'))
4624
4627
4625 batchedcommands = []
4628 batchedcommands = []
4626 elif action == b'batchsubmit':
4629 elif action == b'batchsubmit':
4627 # There is a batching API we could go through. But it would be
4630 # There is a batching API we could go through. But it would be
4628 # difficult to normalize requests into function calls. It is easier
4631 # difficult to normalize requests into function calls. It is easier
4629 # to bypass this layer and normalize to commands + args.
4632 # to bypass this layer and normalize to commands + args.
4630 ui.status(
4633 ui.status(
4631 _(b'sending batch with %d sub-commands\n')
4634 _(b'sending batch with %d sub-commands\n')
4632 % len(batchedcommands)
4635 % len(batchedcommands)
4633 )
4636 )
4634 assert peer is not None
4637 assert peer is not None
4635 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4638 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4636 ui.status(
4639 ui.status(
4637 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4640 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4638 )
4641 )
4639
4642
4640 batchedcommands = None
4643 batchedcommands = None
4641
4644
4642 elif action.startswith(b'httprequest '):
4645 elif action.startswith(b'httprequest '):
4643 if not opener:
4646 if not opener:
4644 raise error.Abort(
4647 raise error.Abort(
4645 _(b'cannot use httprequest without an HTTP peer')
4648 _(b'cannot use httprequest without an HTTP peer')
4646 )
4649 )
4647
4650
4648 request = action.split(b' ', 2)
4651 request = action.split(b' ', 2)
4649 if len(request) != 3:
4652 if len(request) != 3:
4650 raise error.Abort(
4653 raise error.Abort(
4651 _(
4654 _(
4652 b'invalid httprequest: expected format is '
4655 b'invalid httprequest: expected format is '
4653 b'"httprequest <method> <path>'
4656 b'"httprequest <method> <path>'
4654 )
4657 )
4655 )
4658 )
4656
4659
4657 method, httppath = request[1:]
4660 method, httppath = request[1:]
4658 headers = {}
4661 headers = {}
4659 body = None
4662 body = None
4660 frames = []
4663 frames = []
4661 for line in lines:
4664 for line in lines:
4662 line = line.lstrip()
4665 line = line.lstrip()
4663 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4666 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4664 if m:
4667 if m:
4665 # Headers need to use native strings.
4668 # Headers need to use native strings.
4666 key = pycompat.strurl(m.group(1))
4669 key = pycompat.strurl(m.group(1))
4667 value = pycompat.strurl(m.group(2))
4670 value = pycompat.strurl(m.group(2))
4668 headers[key] = value
4671 headers[key] = value
4669 continue
4672 continue
4670
4673
4671 if line.startswith(b'BODYFILE '):
4674 if line.startswith(b'BODYFILE '):
4672 with open(line.split(b' ', 1)[1], b'rb') as fh:
4675 with open(line.split(b' ', 1)[1], b'rb') as fh:
4673 body = fh.read()
4676 body = fh.read()
4674 elif line.startswith(b'frame '):
4677 elif line.startswith(b'frame '):
4675 frame = wireprotoframing.makeframefromhumanstring(
4678 frame = wireprotoframing.makeframefromhumanstring(
4676 line[len(b'frame ') :]
4679 line[len(b'frame ') :]
4677 )
4680 )
4678
4681
4679 frames.append(frame)
4682 frames.append(frame)
4680 else:
4683 else:
4681 raise error.Abort(
4684 raise error.Abort(
4682 _(b'unknown argument to httprequest: %s') % line
4685 _(b'unknown argument to httprequest: %s') % line
4683 )
4686 )
4684
4687
4685 url = path + httppath
4688 url = path + httppath
4686
4689
4687 if frames:
4690 if frames:
4688 body = b''.join(bytes(f) for f in frames)
4691 body = b''.join(bytes(f) for f in frames)
4689
4692
4690 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4693 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4691
4694
4692 # urllib.Request insists on using has_data() as a proxy for
4695 # urllib.Request insists on using has_data() as a proxy for
4693 # determining the request method. Override that to use our
4696 # determining the request method. Override that to use our
4694 # explicitly requested method.
4697 # explicitly requested method.
4695 req.get_method = lambda: pycompat.sysstr(method)
4698 req.get_method = lambda: pycompat.sysstr(method)
4696
4699
4697 try:
4700 try:
4698 res = opener.open(req)
4701 res = opener.open(req)
4699 body = res.read()
4702 body = res.read()
4700 except util.urlerr.urlerror as e:
4703 except util.urlerr.urlerror as e:
4701 # read() method must be called, but only exists in Python 2
4704 # read() method must be called, but only exists in Python 2
4702 getattr(e, 'read', lambda: None)()
4705 getattr(e, 'read', lambda: None)()
4703 continue
4706 continue
4704
4707
4705 ct = res.headers.get('Content-Type')
4708 ct = res.headers.get('Content-Type')
4706 if ct == 'application/mercurial-cbor':
4709 if ct == 'application/mercurial-cbor':
4707 ui.write(
4710 ui.write(
4708 _(b'cbor> %s\n')
4711 _(b'cbor> %s\n')
4709 % stringutil.pprint(
4712 % stringutil.pprint(
4710 cborutil.decodeall(body), bprefix=True, indent=2
4713 cborutil.decodeall(body), bprefix=True, indent=2
4711 )
4714 )
4712 )
4715 )
4713
4716
4714 elif action == b'close':
4717 elif action == b'close':
4715 assert peer is not None
4718 assert peer is not None
4716 peer.close()
4719 peer.close()
4717 elif action == b'readavailable':
4720 elif action == b'readavailable':
4718 if not stdout or not stderr:
4721 if not stdout or not stderr:
4719 raise error.Abort(
4722 raise error.Abort(
4720 _(b'readavailable not available on this peer')
4723 _(b'readavailable not available on this peer')
4721 )
4724 )
4722
4725
4723 stdin.close()
4726 stdin.close()
4724 stdout.read()
4727 stdout.read()
4725 stderr.read()
4728 stderr.read()
4726
4729
4727 elif action == b'readline':
4730 elif action == b'readline':
4728 if not stdout:
4731 if not stdout:
4729 raise error.Abort(_(b'readline not available on this peer'))
4732 raise error.Abort(_(b'readline not available on this peer'))
4730 stdout.readline()
4733 stdout.readline()
4731 elif action == b'ereadline':
4734 elif action == b'ereadline':
4732 if not stderr:
4735 if not stderr:
4733 raise error.Abort(_(b'ereadline not available on this peer'))
4736 raise error.Abort(_(b'ereadline not available on this peer'))
4734 stderr.readline()
4737 stderr.readline()
4735 elif action.startswith(b'read '):
4738 elif action.startswith(b'read '):
4736 count = int(action.split(b' ', 1)[1])
4739 count = int(action.split(b' ', 1)[1])
4737 if not stdout:
4740 if not stdout:
4738 raise error.Abort(_(b'read not available on this peer'))
4741 raise error.Abort(_(b'read not available on this peer'))
4739 stdout.read(count)
4742 stdout.read(count)
4740 elif action.startswith(b'eread '):
4743 elif action.startswith(b'eread '):
4741 count = int(action.split(b' ', 1)[1])
4744 count = int(action.split(b' ', 1)[1])
4742 if not stderr:
4745 if not stderr:
4743 raise error.Abort(_(b'eread not available on this peer'))
4746 raise error.Abort(_(b'eread not available on this peer'))
4744 stderr.read(count)
4747 stderr.read(count)
4745 else:
4748 else:
4746 raise error.Abort(_(b'unknown action: %s') % action)
4749 raise error.Abort(_(b'unknown action: %s') % action)
4747
4750
4748 if batchedcommands is not None:
4751 if batchedcommands is not None:
4749 raise error.Abort(_(b'unclosed "batchbegin" request'))
4752 raise error.Abort(_(b'unclosed "batchbegin" request'))
4750
4753
4751 if peer:
4754 if peer:
4752 peer.close()
4755 peer.close()
4753
4756
4754 if proc:
4757 if proc:
4755 proc.kill()
4758 proc.kill()
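# Editorial sketch (not part of this changeset): turning a human-readable
# frame specification, as documented in the debugwireproto docstring above,
# into the bytes that would form (part of) a request body. The specification
# values below are illustrative.
def _frame_example():
    spec = b'1 1 stream-begin command-request new cbor:{b"name": b"heads"}'
    frame = wireprotoframing.makeframefromhumanstring(spec)
    return bytes(frame)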
@@ -1,935 +1,935
1 setup
1 setup
2
2
3 $ cat >> $HGRCPATH << EOF
3 $ cat >> $HGRCPATH << EOF
4 > [extensions]
4 > [extensions]
5 > blackbox=
5 > blackbox=
6 > mock=$TESTDIR/mockblackbox.py
6 > mock=$TESTDIR/mockblackbox.py
7 > [blackbox]
7 > [blackbox]
8 > track = command, commandfinish, tagscache
8 > track = command, commandfinish, tagscache
9 > EOF
9 > EOF
10
10
11 Helper functions:
11 Helper functions:
12
12
13 $ cacheexists() {
13 $ cacheexists() {
14 > [ -f .hg/cache/tags2-visible ] && echo "tag cache exists" || echo "no tag cache"
14 > [ -f .hg/cache/tags2-visible ] && echo "tag cache exists" || echo "no tag cache"
15 > }
15 > }
16
16
17 $ fnodescacheexists() {
17 $ fnodescacheexists() {
18 > [ -f .hg/cache/hgtagsfnodes1 ] && echo "fnodes cache exists" || echo "no fnodes cache"
18 > [ -f .hg/cache/hgtagsfnodes1 ] && echo "fnodes cache exists" || echo "no fnodes cache"
19 > }
19 > }
20
20
21 $ dumptags() {
21 $ dumptags() {
22 > rev=$1
22 > rev=$1
23 > echo "rev $rev: .hgtags:"
23 > echo "rev $rev: .hgtags:"
24 > hg cat -r$rev .hgtags
24 > hg cat -r$rev .hgtags
25 > }
25 > }
26
26
27 # XXX need to test that the tag cache works when we strip an old head
27 # XXX need to test that the tag cache works when we strip an old head
28 # and add a new one rooted off non-tip: i.e. node and rev of tip are the
28 # and add a new one rooted off non-tip: i.e. node and rev of tip are the
29 # same, but stuff has changed behind tip.
29 # same, but stuff has changed behind tip.
30
30
31 Setup:
31 Setup:
32
32
33 $ hg init t
33 $ hg init t
34 $ cd t
34 $ cd t
35 $ cacheexists
35 $ cacheexists
36 no tag cache
36 no tag cache
37 $ fnodescacheexists
37 $ fnodescacheexists
38 no fnodes cache
38 no fnodes cache
39 $ hg id
39 $ hg id
40 000000000000 tip
40 000000000000 tip
41 $ cacheexists
41 $ cacheexists
42 no tag cache
42 no tag cache
43 $ fnodescacheexists
43 $ fnodescacheexists
44 no fnodes cache
44 no fnodes cache
45 $ echo a > a
45 $ echo a > a
46 $ hg add a
46 $ hg add a
47 $ hg commit -m "test"
47 $ hg commit -m "test"
48 $ hg co
48 $ hg co
49 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 $ hg identify
50 $ hg identify
51 acb14030fe0a tip
51 acb14030fe0a tip
52 $ hg identify -r 'wdir()'
52 $ hg identify -r 'wdir()'
53 acb14030fe0a tip
53 acb14030fe0a tip
54 $ cacheexists
54 $ cacheexists
55 tag cache exists
55 tag cache exists
56 No fnodes cache because .hgtags file doesn't exist
56 No fnodes cache because .hgtags file doesn't exist
57 (this is an implementation detail)
57 (this is an implementation detail)
58 $ fnodescacheexists
58 $ fnodescacheexists
59 no fnodes cache
59 no fnodes cache
60
60
61 Try corrupting the cache
61 Try corrupting the cache
62
62
63 $ printf 'a b' > .hg/cache/tags2-visible
63 $ printf 'a b' > .hg/cache/tags2-visible
64 $ hg identify
64 $ hg identify
65 acb14030fe0a tip
65 acb14030fe0a tip
66 $ cacheexists
66 $ cacheexists
67 tag cache exists
67 tag cache exists
68 $ fnodescacheexists
68 $ fnodescacheexists
69 no fnodes cache
69 no fnodes cache
70 $ hg identify
70 $ hg identify
71 acb14030fe0a tip
71 acb14030fe0a tip
72
72
73 Create local tag with long name:
73 Create local tag with long name:
74
74
75 $ T=`hg identify --debug --id`
75 $ T=`hg identify --debug --id`
76 $ hg tag -l "This is a local tag with a really long name!"
76 $ hg tag -l "This is a local tag with a really long name!"
77 $ hg tags
77 $ hg tags
78 tip 0:acb14030fe0a
78 tip 0:acb14030fe0a
79 This is a local tag with a really long name! 0:acb14030fe0a
79 This is a local tag with a really long name! 0:acb14030fe0a
80 $ rm .hg/localtags
80 $ rm .hg/localtags
81
81
82 Create a tag behind hg's back:
82 Create a tag behind hg's back:
83
83
84 $ echo "$T first" > .hgtags
84 $ echo "$T first" > .hgtags
85 $ cat .hgtags
85 $ cat .hgtags
86 acb14030fe0a21b60322c440ad2d20cf7685a376 first
86 acb14030fe0a21b60322c440ad2d20cf7685a376 first
87 $ hg add .hgtags
87 $ hg add .hgtags
88 $ hg commit -m "add tags"
88 $ hg commit -m "add tags"
89 $ hg tags
89 $ hg tags
90 tip 1:b9154636be93
90 tip 1:b9154636be93
91 first 0:acb14030fe0a
91 first 0:acb14030fe0a
92 $ hg identify
92 $ hg identify
93 b9154636be93 tip
93 b9154636be93 tip
94
94
95 We should have a fnodes cache now that we have a real tag
95 We should have a fnodes cache now that we have a real tag
96 The cache should have an empty entry for rev 0 and a valid entry for rev 1.
96 The cache should have an empty entry for rev 0 and a valid entry for rev 1.
97
97
98
98
99 $ fnodescacheexists
99 $ fnodescacheexists
100 fnodes cache exists
100 fnodes cache exists
101 $ f --size --hexdump .hg/cache/hgtagsfnodes1
101 $ f --size --hexdump .hg/cache/hgtagsfnodes1
102 .hg/cache/hgtagsfnodes1: size=48
102 .hg/cache/hgtagsfnodes1: size=48
103 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
103 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
104 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
104 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
105 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
105 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
106 $ hg debugtagscache
106 $ hg debugtagscache
107 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing
107 0 acb14030fe0a21b60322c440ad2d20cf7685a376 missing
108 1 b9154636be938d3d431e75a7c906504a079bfe07 26b7b4a773e09ee3c52f510e19e05e1ff966d859
108 1 b9154636be938d3d431e75a7c906504a079bfe07 26b7b4a773e09ee3c52f510e19e05e1ff966d859
109
109
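The hexdump and debugtagscache output above are easier to read with the cache layout in mind: hgtagsfnodes1 is an array of fixed-size 24-byte records indexed by revision number, each holding the first 4 bytes of the changeset node followed by the 20-byte filenode of .hgtags in that changeset; slots that were never computed are filled with 0xff, which is why rev 0 shows up as "missing" while rev 1 carries a real filenode. A minimal standalone reader, offered only as a sketch (not Mercurial's implementation; it assumes it is run from the root of the repository used in this transcript):

  RECORD_SIZE = 24  # 4-byte changeset-node prefix + 20-byte .hgtags filenode

  def read_fnodes_cache(path):
      # Yield (rev, prefix_hex, filenode_hex); (rev, None, None) for a slot
      # still filled with 0xff, i.e. a filenode that was never computed.
      with open(path, 'rb') as fp:
          data = fp.read()
      for rev in range(len(data) // RECORD_SIZE):
          record = data[rev * RECORD_SIZE:(rev + 1) * RECORD_SIZE]
          if record == b'\xff' * RECORD_SIZE:
              yield rev, None, None
          else:
              yield rev, record[:4].hex(), record[4:].hex()

  for entry in read_fnodes_cache('.hg/cache/hgtagsfnodes1'):
      print(entry)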
110 Repeat with cold tag cache:
110 Repeat with cold tag cache:
111
111
112 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
112 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
113 $ hg identify
113 $ hg identify
114 b9154636be93 tip
114 b9154636be93 tip
115
115
116 $ fnodescacheexists
116 $ fnodescacheexists
117 fnodes cache exists
117 fnodes cache exists
118 $ f --size --hexdump .hg/cache/hgtagsfnodes1
118 $ f --size --hexdump .hg/cache/hgtagsfnodes1
119 .hg/cache/hgtagsfnodes1: size=48
119 .hg/cache/hgtagsfnodes1: size=48
120 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
120 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
121 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
121 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
122 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
122 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
123
123
124 And again, but now unable to write tag cache or lock file:
124 And again, but now unable to write tag cache or lock file:
125
125
126 #if unix-permissions no-fsmonitor
126 #if unix-permissions no-fsmonitor
127
127
128 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
128 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
129 $ chmod 555 .hg/cache
129 $ chmod 555 .hg/cache
130 $ hg identify
130 $ hg identify
131 b9154636be93 tip
131 b9154636be93 tip
132 $ chmod 755 .hg/cache
132 $ chmod 755 .hg/cache
133
133
134 (this block should be protected by no-fsmonitor, because "chmod 555 .hg"
134 (this block should be protected by no-fsmonitor, because "chmod 555 .hg"
135 makes watchman fail to access files under .hg)
135 makes watchman fail to access files under .hg)
136
136
137 $ chmod 555 .hg
137 $ chmod 555 .hg
138 $ hg identify
138 $ hg identify
139 b9154636be93 tip
139 b9154636be93 tip
140 $ chmod 755 .hg
140 $ chmod 755 .hg
141 #endif
141 #endif
142
142
143 Tag cache debug info written to blackbox log
143 Tag cache debug info written to blackbox log
144
144
145 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
145 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
146 $ hg identify
146 $ hg identify
147 b9154636be93 tip
147 b9154636be93 tip
148 $ hg blackbox -l 6
148 $ hg blackbox -l 6
149 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
149 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
150 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing 48 bytes to cache/hgtagsfnodes1
150 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing 48 bytes to cache/hgtagsfnodes1
151 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
151 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
152 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
152 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
153 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
153 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
154 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
154 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
155
155
156 Failure to acquire lock results in no write
156 Failure to acquire lock results in no write
157
157
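Some context for the lock trick below: a Mercurial lock records its owner as "hostname:pid" (readable from the lock file or symlink), so the foo:1 written into .hg/store/lock looks like a lock held by process 1 on a machine named foo; since that host is not the local one, hg cannot prove the lock is stale, and the fnodes cache writer, which only attempts a non-blocking lock, simply skips the write. A rough sketch of that staleness reasoning, with a hypothetical helper name and not hg's actual lock.py logic:

  import os
  import socket

  def lock_looks_stale(contents):
      # Rough idea only: a lock can only be proven stale when it was taken
      # on this host and the owning process no longer exists.
      host, _, pid = contents.partition(':')
      if host != socket.gethostname():
          return False                 # other host: assume it is still held
      try:
          os.kill(int(pid), 0)         # signal 0 only checks for existence
          return False                 # owner is alive, lock is held
      except ProcessLookupError:
          return True                  # owner is gone, lock is stale
      except PermissionError:
          return False                 # exists but belongs to another user

  print(lock_looks_stale('foo:1'))     # False (unless your host is named foo)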
158 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
158 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
159 $ echo 'foo:1' > .hg/store/lock
159 $ echo 'foo:1' > .hg/store/lock
160 $ hg identify
160 $ hg identify
161 b9154636be93 tip
161 b9154636be93 tip
162 $ hg blackbox -l 6
162 $ hg blackbox -l 6
163 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
163 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify
164 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired
164 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired
165 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
165 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> 0/2 cache hits/lookups in * seconds (glob)
166 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
166 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> writing .hg/cache/tags2-visible with 1 tags
167 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
167 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> identify exited 0 after * seconds (glob)
168 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
168 1970/01/01 00:00:00 bob @b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l 6
169
169
170 $ fnodescacheexists
170 $ fnodescacheexists
171 no fnodes cache
171 no fnodes cache
172
172
173 $ rm .hg/store/lock
173 $ rm .hg/store/lock
174
174
175 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
175 $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1
176 $ hg identify
176 $ hg identify
177 b9154636be93 tip
177 b9154636be93 tip
178
178
179 Create a branch:
179 Create a branch:
180
180
181 $ echo bb > a
181 $ echo bb > a
182 $ hg status
182 $ hg status
183 M a
183 M a
184 $ hg identify
184 $ hg identify
185 b9154636be93+ tip
185 b9154636be93+ tip
186 $ hg co first
186 $ hg co first
187 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
187 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
188 $ hg id
188 $ hg id
189 acb14030fe0a+ first
189 acb14030fe0a+ first
190 $ hg id -r 'wdir()'
190 $ hg id -r 'wdir()'
191 acb14030fe0a+ first
191 acb14030fe0a+ first
192 $ hg -v id
192 $ hg -v id
193 acb14030fe0a+ first
193 acb14030fe0a+ first
194 $ hg status
194 $ hg status
195 M a
195 M a
196 $ echo 1 > b
196 $ echo 1 > b
197 $ hg add b
197 $ hg add b
198 $ hg commit -m "branch"
198 $ hg commit -m "branch"
199 created new head
199 created new head
200
200
201 Creating a new commit shouldn't append to the .hgtags fnodes cache until
201 Creating a new commit shouldn't append to the .hgtags fnodes cache until
202 tags info is accessed
202 tags info is accessed
203
203
204 $ f --size --hexdump .hg/cache/hgtagsfnodes1
204 $ f --size --hexdump .hg/cache/hgtagsfnodes1
205 .hg/cache/hgtagsfnodes1: size=48
205 .hg/cache/hgtagsfnodes1: size=48
206 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
206 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
207 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
207 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
208 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
208 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
209
209
210 $ hg id
210 $ hg id
211 c8edf04160c7 tip
211 c8edf04160c7 tip
212
212
213 First 4 bytes of record 3 are changeset fragment
213 First 4 bytes of record 3 are changeset fragment
214
214
215 $ f --size --hexdump .hg/cache/hgtagsfnodes1
215 $ f --size --hexdump .hg/cache/hgtagsfnodes1
216 .hg/cache/hgtagsfnodes1: size=72
216 .hg/cache/hgtagsfnodes1: size=72
217 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
217 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
218 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
218 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...|
219 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
219 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y|
220 0030: c8 ed f0 41 00 00 00 00 00 00 00 00 00 00 00 00 |...A............|
220 0030: c8 ed f0 41 00 00 00 00 00 00 00 00 00 00 00 00 |...A............|
221 0040: 00 00 00 00 00 00 00 00 |........|
221 0040: 00 00 00 00 00 00 00 00 |........|
222
222
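"Record 3" above is the third 24-byte slot, i.e. the slot for rev 2, which starts at offset 2 * 24 = 48 (0x30 in the hexdump): its first four bytes c8 ed f0 41 are the prefix of changeset c8edf04160c7, and the remaining 20 bytes stay zero (the null filenode) because rev 2 does not contain a .hgtags file. A quick check of that arithmetic, assuming it is run from inside this test repository:

  RECORD_SIZE = 24
  rev = 2
  offset = rev * RECORD_SIZE            # 48 == 0x30, as in the hexdump above
  with open('.hg/cache/hgtagsfnodes1', 'rb') as fp:
      fp.seek(offset)
      record = fp.read(RECORD_SIZE)
  print(record[:4].hex())               # 'c8edf041' -> prefix of c8edf04160c7
  print(record[4:] == b'\x00' * 20)     # True: null filenode, no .hgtags here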
223 Merge the two heads:
223 Merge the two heads:
224
224
225 $ hg merge 1
225 $ hg merge 1
226 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
226 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
227 (branch merge, don't forget to commit)
227 (branch merge, don't forget to commit)
228 $ hg blackbox -l3
228 $ hg blackbox -l3
229 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28 (5000)> merge 1
229 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28 (5000)> merge 1
230 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> merge 1 exited 0 after * seconds (glob)
230 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> merge 1 exited 0 after * seconds (glob)
231 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l3
231 1970/01/01 00:00:00 bob @c8edf04160c7f731e4589d66ab3ab3486a64ac28+b9154636be938d3d431e75a7c906504a079bfe07 (5000)> blackbox -l3
232 $ hg id
232 $ hg id
233 c8edf04160c7+b9154636be93+ tip
233 c8edf04160c7+b9154636be93+ tip
234 $ hg status
234 $ hg status
235 M .hgtags
235 M .hgtags
236 $ hg commit -m "merge"
236 $ hg commit -m "merge"
237
237
238 Create a fake head, make sure tag not visible afterwards:
238 Create a fake head, make sure tag not visible afterwards:
239
239
240 $ cp .hgtags tags
240 $ cp .hgtags tags
241 $ hg tag last
241 $ hg tag last
242 $ hg rm .hgtags
242 $ hg rm .hgtags
243 $ hg commit -m "remove"
243 $ hg commit -m "remove"
244
244
245 $ mv tags .hgtags
245 $ mv tags .hgtags
246 $ hg add .hgtags
246 $ hg add .hgtags
247 $ hg commit -m "readd"
247 $ hg commit -m "readd"
248 $
248 $
249 $ hg tags
249 $ hg tags
250 tip 6:35ff301afafe
250 tip 6:35ff301afafe
251 first 0:acb14030fe0a
251 first 0:acb14030fe0a
252
252
253 Add invalid tags:
253 Add invalid tags:
254
254
255 $ echo "spam" >> .hgtags
255 $ echo "spam" >> .hgtags
256 $ echo >> .hgtags
256 $ echo >> .hgtags
257 $ echo "foo bar" >> .hgtags
257 $ echo "foo bar" >> .hgtags
258 $ echo "a5a5 invalid" >> .hg/localtags
258 $ echo "a5a5 invalid" >> .hg/localtags
259 $ cat .hgtags
259 $ cat .hgtags
260 acb14030fe0a21b60322c440ad2d20cf7685a376 first
260 acb14030fe0a21b60322c440ad2d20cf7685a376 first
261 spam
261 spam
262
262
263 foo bar
263 foo bar
264 $ hg commit -m "tags"
264 $ hg commit -m "tags"
265
265
266 Report tag parse error on other head:
266 Report tag parse error on other head:
267
267
268 $ hg up 3
268 $ hg up 3
269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 $ echo 'x y' >> .hgtags
270 $ echo 'x y' >> .hgtags
271 $ hg commit -m "head"
271 $ hg commit -m "head"
272 created new head
272 created new head
273
273
274 $ hg tags --debug
274 $ hg tags --debug
275 .hgtags@75d9f02dfe28, line 2: cannot parse entry
275 .hgtags@75d9f02dfe28, line 2: cannot parse entry
276 .hgtags@75d9f02dfe28, line 4: node 'foo' is not well formed
276 .hgtags@75d9f02dfe28, line 4: node 'foo' is not well formed
277 .hgtags@c4be69a18c11, line 2: node 'x' is not well formed
277 .hgtags@c4be69a18c11, line 2: node 'x' is not well formed
278 tip 8:c4be69a18c11e8bc3a5fdbb576017c25f7d84663
278 tip 8:c4be69a18c11e8bc3a5fdbb576017c25f7d84663
279 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
279 first 0:acb14030fe0a21b60322c440ad2d20cf7685a376
280 $ hg tip
280 $ hg tip
281 changeset: 8:c4be69a18c11
281 changeset: 8:c4be69a18c11
282 tag: tip
282 tag: tip
283 parent: 3:ac5e980c4dc0
283 parent: 3:ac5e980c4dc0
284 user: test
284 user: test
285 date: Thu Jan 01 00:00:00 1970 +0000
285 date: Thu Jan 01 00:00:00 1970 +0000
286 summary: head
286 summary: head
287
287
288
288
289 Test tag precedence rules:
289 Test tag precedence rules:
290
290
291 $ cd ..
291 $ cd ..
292 $ hg init t2
292 $ hg init t2
293 $ cd t2
293 $ cd t2
294 $ echo foo > foo
294 $ echo foo > foo
295 $ hg add foo
295 $ hg add foo
296 $ hg ci -m 'add foo' # rev 0
296 $ hg ci -m 'add foo' # rev 0
297 $ hg tag bar # rev 1
297 $ hg tag bar # rev 1
298 $ echo >> foo
298 $ echo >> foo
299 $ hg ci -m 'change foo 1' # rev 2
299 $ hg ci -m 'change foo 1' # rev 2
300 $ hg up -C 1
300 $ hg up -C 1
301 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
301 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
302 $ hg tag -r 1 -f bar # rev 3
302 $ hg tag -r 1 -f bar # rev 3
303 $ hg up -C 1
303 $ hg up -C 1
304 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
304 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
305 $ echo >> foo
305 $ echo >> foo
306 $ hg ci -m 'change foo 2' # rev 4
306 $ hg ci -m 'change foo 2' # rev 4
307 created new head
307 created new head
308 $ hg tags
308 $ hg tags
309 tip 4:0c192d7d5e6b
309 tip 4:0c192d7d5e6b
310 bar 1:78391a272241
310 bar 1:78391a272241
311
311
312 Repeat in case of cache effects:
312 Repeat in case of cache effects:
313
313
314 $ hg tags
314 $ hg tags
315 tip 4:0c192d7d5e6b
315 tip 4:0c192d7d5e6b
316 bar 1:78391a272241
316 bar 1:78391a272241
317
317
318 Detailed dump of tag info:
318 Detailed dump of tag info:
319
319
320 $ hg heads -q # expect 4, 3, 2
320 $ hg heads -q # expect 4, 3, 2
321 4:0c192d7d5e6b
321 4:0c192d7d5e6b
322 3:6fa450212aeb
322 3:6fa450212aeb
323 2:7a94127795a3
323 2:7a94127795a3
324 $ dumptags 2
324 $ dumptags 2
325 rev 2: .hgtags:
325 rev 2: .hgtags:
326 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
326 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
327 $ dumptags 3
327 $ dumptags 3
328 rev 3: .hgtags:
328 rev 3: .hgtags:
329 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
329 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
330 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
330 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
331 78391a272241d70354aa14c874552cad6b51bb42 bar
331 78391a272241d70354aa14c874552cad6b51bb42 bar
332 $ dumptags 4
332 $ dumptags 4
333 rev 4: .hgtags:
333 rev 4: .hgtags:
334 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
334 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
335
335
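Those three dumps are enough to see how the winner is picked. Roughly, and only as a simplified sketch of the idea rather than Mercurial's tags.py code: each head contributes, for a tag, the node on its last matching .hgtags line, with the earlier matching lines acting as that entry's history; when heads disagree, a candidate whose node appears in a rival's history is treated as superseded. The toy resolver below (hypothetical helper, short hex prefixes taken from the dumps above) reproduces the result: bar resolves to 78391a272241, i.e. rev 1, as `hg tags` reported.

  def resolve(per_head_entries):
      # per_head_entries: list of (node, history) candidates for one tag.
      winner = None
      for node, hist in per_head_entries:
          if winner is None:
              winner = (node, hist)
              continue
          wnode, whist = winner
          if wnode in hist:            # current winner superseded by this head
              winner = (node, hist)
          elif node in whist:          # this candidate is already superseded
              continue
          # otherwise keep the earlier candidate; real hg applies additional
          # tie-breaking (e.g. history length) that is omitted here.
      return winner[0]

  heads_bar = [
      ('bbd179dfa0a7', []),                                 # rev 2's .hgtags
      ('78391a272241', ['bbd179dfa0a7', 'bbd179dfa0a7']),   # rev 3's .hgtags
      ('bbd179dfa0a7', []),                                 # rev 4's .hgtags
  ]
  print(resolve(heads_bar))            # 78391a272241 -> bar points at rev 1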
336 Dump cache:
336 Dump cache:
337
337
338 $ cat .hg/cache/tags2-visible
338 $ cat .hg/cache/tags2-visible
339 4 0c192d7d5e6b78a714de54a2e9627952a877e25a
339 4 0c192d7d5e6b78a714de54a2e9627952a877e25a
340 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
340 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
341 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
341 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
342 78391a272241d70354aa14c874552cad6b51bb42 bar
342 78391a272241d70354aa14c874552cad6b51bb42 bar
343
343
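The dump above shows the shape of the tags2* caches: a header line with the tip rev and tip node the cache was computed against (a filtered-view hash may follow on the same line), then node/name pairs in .hgtags syntax, with a tag's superseded nodes listed before its current one so that the last line for a name is the effective value (here 78391a272241 for bar, matching `hg tags`). A small standalone reader sketch, not Mercurial's parser, assuming the working directory is this repository:

  def read_tags_cache(path):
      # Return (tiprev, tipnode, {tag: node}); the last line for a tag wins.
      tags = {}
      with open(path, 'r') as fp:
          header = fp.readline().split()
          tiprev, tipnode = int(header[0]), header[1]
          for line in fp:
              line = line.strip()
              if not line:
                  continue
              node, tag = line.split(' ', 1)
              tags[tag] = node          # later entries override the history
      return tiprev, tipnode, tags

  print(read_tags_cache('.hg/cache/tags2-visible'))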
344 $ f --size --hexdump .hg/cache/hgtagsfnodes1
344 $ f --size --hexdump .hg/cache/hgtagsfnodes1
345 .hg/cache/hgtagsfnodes1: size=120
345 .hg/cache/hgtagsfnodes1: size=120
346 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
346 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
347 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
347 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
348 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
348 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
349 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
349 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
350 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
350 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
351 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
351 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
352 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
352 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
353 0070: 78 ee 5a 2d ad bc 94 3d |x.Z-...=|
353 0070: 78 ee 5a 2d ad bc 94 3d |x.Z-...=|
354
354
355 Corrupt the .hgtags fnodes cache
355 Corrupt the .hgtags fnodes cache
356 Extra junk data at the end should get overwritten on next cache update
356 Extra junk data at the end should get overwritten on next cache update
357
357
358 $ echo extra >> .hg/cache/hgtagsfnodes1
358 $ echo extra >> .hg/cache/hgtagsfnodes1
359 $ echo dummy1 > foo
359 $ echo dummy1 > foo
360 $ hg commit -m throwaway1
360 $ hg commit -m throwaway1
361
361
362 $ hg tags
362 $ hg tags
363 tip 5:8dbfe60eff30
363 tip 5:8dbfe60eff30
364 bar 1:78391a272241
364 bar 1:78391a272241
365
365
366 $ hg blackbox -l 6
366 $ hg blackbox -l 6
367 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags
367 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags
368 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing 24 bytes to cache/hgtagsfnodes1
368 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing 24 bytes to cache/hgtagsfnodes1
369 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 3/4 cache hits/lookups in * seconds (glob)
369 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> 3/4 cache hits/lookups in * seconds (glob)
370 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing .hg/cache/tags2-visible with 1 tags
370 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> writing .hg/cache/tags2-visible with 1 tags
371 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob)
371 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> tags exited 0 after * seconds (glob)
372 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6
372 1970/01/01 00:00:00 bob @8dbfe60eff306a54259cfe007db9e330e7ecf866 (5000)> blackbox -l 6
373
373
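Why the appended junk is harmless here: before the throwaway1 commit the cache held five 24-byte records (revs 0-4, 120 bytes), so the six bytes of "extra" sat at offsets 120-125; refreshing the tags for the new rev 5 writes that revision's 24-byte record starting at 5 * 24 = 120 (the "writing 24 bytes" blackbox line above), which covers the junk and yields the clean 144-byte file shown in a later hexdump. The arithmetic, spelled out:

  RECORD_SIZE = 24
  old_size = 5 * RECORD_SIZE             # revs 0-4 cached: 120 bytes
  junk_end = old_size + len(b'extra\n')  # appended junk spans bytes 120-125
  rev5_slot = (5 * RECORD_SIZE, 6 * RECORD_SIZE)  # bytes 120-143
  assert rev5_slot[0] <= old_size and junk_end <= rev5_slot[1]
  print(rev5_slot)                       # (120, 144): matches the later size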
374 On junk data + missing cache entries, hg also overwrites the junk.
374 On junk data + missing cache entries, hg also overwrites the junk.
375
375
376 $ rm -f .hg/cache/tags2-visible
376 $ rm -f .hg/cache/tags2-visible
377 >>> import os
377 >>> import os
378 >>> with open(".hg/cache/hgtagsfnodes1", "ab+") as fp:
378 >>> with open(".hg/cache/hgtagsfnodes1", "ab+") as fp:
379 ... fp.seek(-10, os.SEEK_END) and None
379 ... fp.seek(-10, os.SEEK_END) and None
380 ... fp.truncate() and None
380 ... fp.truncate() and None
381
381
382 $ hg debugtagscache | tail -2
382 $ hg debugtagscache | tail -2
383 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
383 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
384 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing
384 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 missing
385 $ hg tags
385 $ hg tags
386 tip 5:8dbfe60eff30
386 tip 5:8dbfe60eff30
387 bar 1:78391a272241
387 bar 1:78391a272241
388 $ hg debugtagscache | tail -2
388 $ hg debugtagscache | tail -2
389 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
389 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
390 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8af31de17fab7422878ee5a2dadbc943d
390 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8af31de17fab7422878ee5a2dadbc943d
391
391
392 If the 4 bytes of node hash for a record don't match an existing node, the entry
392 If the 4 bytes of node hash for a record don't match an existing node, the entry
393 is flagged as invalid.
393 is flagged as invalid.
394
394
395 >>> import os
395 >>> import os
396 >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp:
396 >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp:
397 ... fp.seek(-24, os.SEEK_END) and None
397 ... fp.seek(-24, os.SEEK_END) and None
398 ... fp.write(b'\xde\xad') and None
398 ... fp.write(b'\xde\xad') and None
399
399
400 $ f --size --hexdump .hg/cache/hgtagsfnodes1
400 $ f --size --hexdump .hg/cache/hgtagsfnodes1
401 .hg/cache/hgtagsfnodes1: size=144
401 .hg/cache/hgtagsfnodes1: size=144
402 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
402 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
403 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
403 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
404 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
404 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
405 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
405 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
406 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
406 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
407 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
407 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
408 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
408 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
409 0070: 78 ee 5a 2d ad bc 94 3d de ad e6 0e 0c 04 f2 a8 |x.Z-...=........|
409 0070: 78 ee 5a 2d ad bc 94 3d de ad e6 0e 0c 04 f2 a8 |x.Z-...=........|
410 0080: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=|
410 0080: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=|
411
411
412 $ hg debugtagscache | tail -2
412 $ hg debugtagscache | tail -2
413 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
413 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
414 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 invalid
414 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 invalid
415
415
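A sketch of the check just demonstrated, not Mercurial's own code: a cached slot is only trusted when its 4-byte prefix matches the real changeset node for that revision, which is why the \xde\xad overwrite above makes rev 5 come back as "invalid". The helper name is hypothetical, the node is looked up by shelling out to `hg log`, and it assumes it is run from inside this repository:

  import subprocess

  RECORD_SIZE = 24

  def record_matches_changelog(rev, cache_path='.hg/cache/hgtagsfnodes1'):
      # Trust the slot only if its 4-byte prefix matches the changeset node.
      with open(cache_path, 'rb') as fp:
          fp.seek(rev * RECORD_SIZE)
          record = fp.read(RECORD_SIZE)
      if len(record) < RECORD_SIZE or record == b'\xff' * RECORD_SIZE:
          return False
      node = subprocess.check_output(
          ['hg', 'log', '-r', str(rev), '-T', '{node}']).decode()
      return bytes.fromhex(node)[:4] == record[:4]

  print(record_matches_changelog(5))     # False after the \xde\xad overwrite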
416 $ hg tags
416 $ hg tags
417 tip 5:8dbfe60eff30
417 tip 5:8dbfe60eff30
418 bar 1:78391a272241
418 bar 1:78391a272241
419
419
420 BUG: If the filenode part of an entry in hgtagsfnodes is corrupt and
420 BUG: If the filenode part of an entry in hgtagsfnodes is corrupt and
421 tags2-visible is missing, `hg tags` aborts. Corrupting the leading 4 bytes of
421 tags2-visible is missing, `hg tags` aborts. Corrupting the leading 4 bytes of
422 node hash (as above) doesn't seem to trigger the issue. Also note that the
422 node hash (as above) doesn't seem to trigger the issue. Also note that the
423 debug command hides the corruption, both with and without tags2-visible.
423 debug command hides the corruption, both with and without tags2-visible.
424
424
425 $ mv .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1.bak
425 $ mv .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1.bak
426 $ hg debugupdatecaches
426 $ hg debugupdatecaches
427
427
428 >>> import os
428 >>> import os
429 >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp:
429 >>> with open(".hg/cache/hgtagsfnodes1", "rb+") as fp:
430 ... fp.seek(-16, os.SEEK_END) and None
430 ... fp.seek(-16, os.SEEK_END) and None
431 ... fp.write(b'\xde\xad') and None
431 ... fp.write(b'\xde\xad') and None
432
432
433 $ f --size --hexdump .hg/cache/hgtagsfnodes1
433 $ f --size --hexdump .hg/cache/hgtagsfnodes1
434 .hg/cache/hgtagsfnodes1: size=144
434 .hg/cache/hgtagsfnodes1: size=144
435 0000: bb d1 79 df 00 00 00 00 00 00 00 00 00 00 00 00 |..y.............|
435 0000: bb d1 79 df 00 00 00 00 00 00 00 00 00 00 00 00 |..y.............|
436 0010: 00 00 00 00 00 00 00 00 78 39 1a 27 0c 04 f2 a8 |........x9.'....|
436 0010: 00 00 00 00 00 00 00 00 78 39 1a 27 0c 04 f2 a8 |........x9.'....|
437 0020: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=|
437 0020: af 31 de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |.1....B(x.Z-...=|
438 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
438 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(|
439 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
439 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.|
440 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
440 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..|
441 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
441 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(|
442 0070: 78 ee 5a 2d ad bc 94 3d 8d bf e6 0e 0c 04 f2 a8 |x.Z-...=........|
442 0070: 78 ee 5a 2d ad bc 94 3d 8d bf e6 0e 0c 04 f2 a8 |x.Z-...=........|
443 0080: de ad de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |......B(x.Z-...=|
443 0080: de ad de 17 fa b7 42 28 78 ee 5a 2d ad bc 94 3d |......B(x.Z-...=|
444
444
445 $ hg debugtagscache | tail -2
445 $ hg debugtagscache | tail -2
446 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
446 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
447 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d
447 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node)
448
448
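The "(unknown node)" marker is the extra verification this change is about: a record can be well formed (valid 4-byte prefix) and still carry a filenode that the .hgtags filelog has never seen. A rough illustration of that idea with Mercurial's Python API, offered as a sketch rather than the exact debugtagscache code, run from this repository:

  from mercurial import hg, ui as uimod

  repo = hg.repository(uimod.ui.load(), b'.')
  # The corrupted 20-byte filenode from the record above:
  fnode = bytes.fromhex('0c04f2a8deadde17fab7422878ee5a2dadbc943d')
  hgtags_fl = repo.file(b'.hgtags')      # the filelog that stores .hgtags
  print(hgtags_fl.hasnode(fnode))        # False -> shown as "(unknown node)"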
449 $ rm -f .hg/cache/tags2-visible
449 $ rm -f .hg/cache/tags2-visible
450 $ hg debugtagscache | tail -2
450 $ hg debugtagscache | tail -2
451 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
451 4 0c192d7d5e6b78a714de54a2e9627952a877e25a 0c04f2a8af31de17fab7422878ee5a2dadbc943d
452 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d
452 5 8dbfe60eff306a54259cfe007db9e330e7ecf866 0c04f2a8deadde17fab7422878ee5a2dadbc943d (unknown node)
453
453
454 $ hg tags
454 $ hg tags
455 abort: data/.hgtags.i@0c04f2a8deadde17fab7422878ee5a2dadbc943d: no match found
455 abort: data/.hgtags.i@0c04f2a8deadde17fab7422878ee5a2dadbc943d: no match found
456 [50]
456 [50]
457
457
458 BUG: Unless this file is restored, the `hg tags` in the next unix-permissions
458 BUG: Unless this file is restored, the `hg tags` in the next unix-permissions
459 conditional will fail: "abort: data/.hgtags.i@0c04f2a8dead: no match found"
459 conditional will fail: "abort: data/.hgtags.i@0c04f2a8dead: no match found"
460
460
461 $ mv .hg/cache/hgtagsfnodes1.bak .hg/cache/hgtagsfnodes1
461 $ mv .hg/cache/hgtagsfnodes1.bak .hg/cache/hgtagsfnodes1
462
462
463 #if unix-permissions no-root
463 #if unix-permissions no-root
464 Errors writing to .hgtags fnodes cache are silently ignored
464 Errors writing to .hgtags fnodes cache are silently ignored
465
465
466 $ echo dummy2 > foo
466 $ echo dummy2 > foo
467 $ hg commit -m throwaway2
467 $ hg commit -m throwaway2
468
468
469 $ chmod a-w .hg/cache/hgtagsfnodes1
469 $ chmod a-w .hg/cache/hgtagsfnodes1
470 $ rm -f .hg/cache/tags2-visible
470 $ rm -f .hg/cache/tags2-visible
471
471
472 $ hg tags
472 $ hg tags
473 tip 6:b968051b5cf3
473 tip 6:b968051b5cf3
474 bar 1:78391a272241
474 bar 1:78391a272241
475
475
476 $ hg blackbox -l 6
476 $ hg blackbox -l 6
477 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
477 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
478 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob)
478 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> couldn't write cache/hgtagsfnodes1: [Errno *] * (glob)
479 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob)
479 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob)
480 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
480 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
481 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
481 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
482 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
482 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
483
483
484 $ chmod a+w .hg/cache/hgtagsfnodes1
484 $ chmod a+w .hg/cache/hgtagsfnodes1
485
485
486 $ rm -f .hg/cache/tags2-visible
486 $ rm -f .hg/cache/tags2-visible
487 $ hg tags
487 $ hg tags
488 tip 6:b968051b5cf3
488 tip 6:b968051b5cf3
489 bar 1:78391a272241
489 bar 1:78391a272241
490
490
491 $ hg blackbox -l 6
491 $ hg blackbox -l 6
492 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
492 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags
493 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1
493 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing 24 bytes to cache/hgtagsfnodes1
494 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob)
494 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> 2/4 cache hits/lookups in * seconds (glob)
495 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
495 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> writing .hg/cache/tags2-visible with 1 tags
496 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
496 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> tags exited 0 after * seconds (glob)
497 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
497 1970/01/01 00:00:00 bob @b968051b5cf3f624b771779c6d5f84f1d4c3fb5d (5000)> blackbox -l 6
498
498
499 $ f --size .hg/cache/hgtagsfnodes1
499 $ f --size .hg/cache/hgtagsfnodes1
500 .hg/cache/hgtagsfnodes1: size=168
500 .hg/cache/hgtagsfnodes1: size=168
501
501
502 $ hg -q --config extensions.strip= strip -r 6 --no-backup
502 $ hg -q --config extensions.strip= strip -r 6 --no-backup
503 #endif
503 #endif
504
504
505 Stripping doesn't truncate the tags cache until new data is available
505 Stripping doesn't truncate the tags cache until new data is available
506
506
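The point of this section is when the shrinking happens: the strip itself leaves the cache file alone, and only the next tags refresh rewrites it at one 24-byte record per surviving revision, which is what the size outputs below show (144 bytes for revs 0-5, then 120 bytes once rev 5 is gone). A trivial helper expressing that expectation, purely illustrative:

  RECORD_SIZE = 24

  def expected_fnodes_cache_size(tiprev):
      # one fixed-size record per revision 0..tiprev
      return (tiprev + 1) * RECORD_SIZE

  print(expected_fnodes_cache_size(5))   # 144: the size before the strip
  print(expected_fnodes_cache_size(4))   # 120: the size after the next refresh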
507 $ rm -f .hg/cache/hgtagsfnodes1 .hg/cache/tags2-visible
507 $ rm -f .hg/cache/hgtagsfnodes1 .hg/cache/tags2-visible
508 $ hg tags
508 $ hg tags
509 tip 5:8dbfe60eff30
509 tip 5:8dbfe60eff30
510 bar 1:78391a272241
510 bar 1:78391a272241
511
511
512 $ f --size .hg/cache/hgtagsfnodes1
512 $ f --size .hg/cache/hgtagsfnodes1
513 .hg/cache/hgtagsfnodes1: size=144
513 .hg/cache/hgtagsfnodes1: size=144
514
514
515 $ hg -q --config extensions.strip= strip -r 5 --no-backup
515 $ hg -q --config extensions.strip= strip -r 5 --no-backup
516 $ hg tags
516 $ hg tags
517 tip 4:0c192d7d5e6b
517 tip 4:0c192d7d5e6b
518 bar 1:78391a272241
518 bar 1:78391a272241
519
519
520 $ hg blackbox -l 5
520 $ hg blackbox -l 5
521 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing 24 bytes to cache/hgtagsfnodes1
521 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing 24 bytes to cache/hgtagsfnodes1
522 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/4 cache hits/lookups in * seconds (glob)
522 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> 2/4 cache hits/lookups in * seconds (glob)
523 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing .hg/cache/tags2-visible with 1 tags
523 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> writing .hg/cache/tags2-visible with 1 tags
524 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> tags exited 0 after * seconds (glob)
524 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> tags exited 0 after * seconds (glob)
525 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> blackbox -l 5
525 1970/01/01 00:00:00 bob @0c192d7d5e6b78a714de54a2e9627952a877e25a (5000)> blackbox -l 5
526
526
527 $ f --size .hg/cache/hgtagsfnodes1
527 $ f --size .hg/cache/hgtagsfnodes1
528 .hg/cache/hgtagsfnodes1: size=120
528 .hg/cache/hgtagsfnodes1: size=120
529
529
530 $ echo dummy > foo
530 $ echo dummy > foo
531 $ hg commit -m throwaway3
531 $ hg commit -m throwaway3
532
532
533 $ hg tags
533 $ hg tags
534 tip 5:035f65efb448
534 tip 5:035f65efb448
535 bar 1:78391a272241
535 bar 1:78391a272241
536
536
537 $ hg blackbox -l 6
537 $ hg blackbox -l 6
538 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags
538 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags
539 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing 24 bytes to cache/hgtagsfnodes1
539 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing 24 bytes to cache/hgtagsfnodes1
540 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 3/4 cache hits/lookups in * seconds (glob)
540 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> 3/4 cache hits/lookups in * seconds (glob)
541 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing .hg/cache/tags2-visible with 1 tags
541 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> writing .hg/cache/tags2-visible with 1 tags
542 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags exited 0 after * seconds (glob)
542 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> tags exited 0 after * seconds (glob)
543 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> blackbox -l 6
543 1970/01/01 00:00:00 bob @035f65efb448350f4772141702a81ab1df48c465 (5000)> blackbox -l 6
544 $ f --size .hg/cache/hgtagsfnodes1
544 $ f --size .hg/cache/hgtagsfnodes1
545 .hg/cache/hgtagsfnodes1: size=144
545 .hg/cache/hgtagsfnodes1: size=144
546
546
547 $ hg -q --config extensions.strip= strip -r 5 --no-backup
547 $ hg -q --config extensions.strip= strip -r 5 --no-backup
548
548
549 Test tag removal:
549 Test tag removal:
550
550
551 $ hg tag --remove bar # rev 5
551 $ hg tag --remove bar # rev 5
552 $ hg tip -vp
552 $ hg tip -vp
553 changeset: 5:5f6e8655b1c7
553 changeset: 5:5f6e8655b1c7
554 tag: tip
554 tag: tip
555 user: test
555 user: test
556 date: Thu Jan 01 00:00:00 1970 +0000
556 date: Thu Jan 01 00:00:00 1970 +0000
557 files: .hgtags
557 files: .hgtags
558 description:
558 description:
559 Removed tag bar
559 Removed tag bar
560
560
561
561
562 diff -r 0c192d7d5e6b -r 5f6e8655b1c7 .hgtags
562 diff -r 0c192d7d5e6b -r 5f6e8655b1c7 .hgtags
563 --- a/.hgtags Thu Jan 01 00:00:00 1970 +0000
563 --- a/.hgtags Thu Jan 01 00:00:00 1970 +0000
564 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
564 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000
565 @@ -1,1 +1,3 @@
565 @@ -1,1 +1,3 @@
566 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
566 bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar
567 +78391a272241d70354aa14c874552cad6b51bb42 bar
567 +78391a272241d70354aa14c874552cad6b51bb42 bar
568 +0000000000000000000000000000000000000000 bar
568 +0000000000000000000000000000000000000000 bar
569
569
570 $ hg tags
570 $ hg tags
571 tip 5:5f6e8655b1c7
571 tip 5:5f6e8655b1c7
572 $ hg tags # again, try to expose cache bugs
572 $ hg tags # again, try to expose cache bugs
573 tip 5:5f6e8655b1c7
573 tip 5:5f6e8655b1c7
574
574
575 Remove nonexistent tag:
575 Remove nonexistent tag:
576
576
577 $ hg tag --remove foobar
577 $ hg tag --remove foobar
578 abort: tag 'foobar' does not exist
578 abort: tag 'foobar' does not exist
579 [10]
579 [10]
580 $ hg tip
580 $ hg tip
581 changeset: 5:5f6e8655b1c7
581 changeset: 5:5f6e8655b1c7
582 tag: tip
582 tag: tip
583 user: test
583 user: test
584 date: Thu Jan 01 00:00:00 1970 +0000
584 date: Thu Jan 01 00:00:00 1970 +0000
585 summary: Removed tag bar
585 summary: Removed tag bar
586
586
587
587
588 Undo a tag with rollback:
588 Undo a tag with rollback:
589
589
590 $ hg rollback # destroy rev 5 (restore bar)
590 $ hg rollback # destroy rev 5 (restore bar)
591 repository tip rolled back to revision 4 (undo commit)
591 repository tip rolled back to revision 4 (undo commit)
592 working directory now based on revision 4
592 working directory now based on revision 4
593 $ hg tags
593 $ hg tags
594 tip 4:0c192d7d5e6b
594 tip 4:0c192d7d5e6b
595 bar 1:78391a272241
595 bar 1:78391a272241
596 $ hg tags
596 $ hg tags
597 tip 4:0c192d7d5e6b
597 tip 4:0c192d7d5e6b
598 bar 1:78391a272241
598 bar 1:78391a272241
599
599
600 Test tag rank:
600 Test tag rank:
601
601
602 $ cd ..
602 $ cd ..
603 $ hg init t3
603 $ hg init t3
604 $ cd t3
604 $ cd t3
605 $ echo foo > foo
605 $ echo foo > foo
606 $ hg add foo
606 $ hg add foo
607 $ hg ci -m 'add foo' # rev 0
607 $ hg ci -m 'add foo' # rev 0
608 $ hg tag -f bar # rev 1 bar -> 0
608 $ hg tag -f bar # rev 1 bar -> 0
609 $ hg tag -f bar # rev 2 bar -> 1
609 $ hg tag -f bar # rev 2 bar -> 1
610 $ hg tag -fr 0 bar # rev 3 bar -> 0
610 $ hg tag -fr 0 bar # rev 3 bar -> 0
611 $ hg tag -fr 1 bar # rev 4 bar -> 1
611 $ hg tag -fr 1 bar # rev 4 bar -> 1
612 $ hg tag -fr 0 bar # rev 5 bar -> 0
612 $ hg tag -fr 0 bar # rev 5 bar -> 0
613 $ hg tags
613 $ hg tags
614 tip 5:85f05169d91d
614 tip 5:85f05169d91d
615 bar 0:bbd179dfa0a7
615 bar 0:bbd179dfa0a7
616 $ hg co 3
616 $ hg co 3
617 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
617 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
618 $ echo barbar > foo
618 $ echo barbar > foo
619 $ hg ci -m 'change foo' # rev 6
619 $ hg ci -m 'change foo' # rev 6
620 created new head
620 created new head
621 $ hg tags
621 $ hg tags
622 tip 6:735c3ca72986
622 tip 6:735c3ca72986
623 bar 0:bbd179dfa0a7
623 bar 0:bbd179dfa0a7
624
624
625 Don't allow moving tag without -f:
625 Don't allow moving tag without -f:
626
626
627 $ hg tag -r 3 bar
627 $ hg tag -r 3 bar
628 abort: tag 'bar' already exists (use -f to force)
628 abort: tag 'bar' already exists (use -f to force)
629 [10]
629 [10]
630 $ hg tags
630 $ hg tags
631 tip 6:735c3ca72986
631 tip 6:735c3ca72986
632 bar 0:bbd179dfa0a7
632 bar 0:bbd179dfa0a7
633
633
634 Strip 1: expose an old head:
634 Strip 1: expose an old head:
635
635
636 $ hg --config extensions.mq= strip 5
636 $ hg --config extensions.mq= strip 5
637 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob)
637 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob)
638 $ hg tags # partly stale cache
638 $ hg tags # partly stale cache
639 tip 5:735c3ca72986
639 tip 5:735c3ca72986
640 bar 1:78391a272241
640 bar 1:78391a272241
641 $ hg tags # up-to-date cache
641 $ hg tags # up-to-date cache
642 tip 5:735c3ca72986
642 tip 5:735c3ca72986
643 bar 1:78391a272241
643 bar 1:78391a272241
644
644
645 Strip 2: destroy whole branch, no old head exposed
645 Strip 2: destroy whole branch, no old head exposed
646
646
647 $ hg --config extensions.mq= strip 4
647 $ hg --config extensions.mq= strip 4
648 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob)
648 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob)
649 $ hg tags # partly stale
649 $ hg tags # partly stale
650 tip 4:735c3ca72986
650 tip 4:735c3ca72986
651 bar 0:bbd179dfa0a7
651 bar 0:bbd179dfa0a7
652 $ rm -f .hg/cache/tags2-visible
652 $ rm -f .hg/cache/tags2-visible
653 $ hg tags # cold cache
653 $ hg tags # cold cache
654 tip 4:735c3ca72986
654 tip 4:735c3ca72986
655 bar 0:bbd179dfa0a7
655 bar 0:bbd179dfa0a7
656
656
657 Test tag rank with 3 heads:
657 Test tag rank with 3 heads:
658
658
659 $ cd ..
659 $ cd ..
660 $ hg init t4
660 $ hg init t4
661 $ cd t4
661 $ cd t4
662 $ echo foo > foo
662 $ echo foo > foo
663 $ hg add
663 $ hg add
664 adding foo
664 adding foo
665 $ hg ci -m 'add foo' # rev 0
665 $ hg ci -m 'add foo' # rev 0
666 $ hg tag bar # rev 1 bar -> 0
666 $ hg tag bar # rev 1 bar -> 0
667 $ hg tag -f bar # rev 2 bar -> 1
667 $ hg tag -f bar # rev 2 bar -> 1
668 $ hg up -qC 0
668 $ hg up -qC 0
669 $ hg tag -fr 2 bar # rev 3 bar -> 2
669 $ hg tag -fr 2 bar # rev 3 bar -> 2
670 $ hg tags
670 $ hg tags
671 tip 3:197c21bbbf2c
671 tip 3:197c21bbbf2c
672 bar 2:6fa450212aeb
672 bar 2:6fa450212aeb
673 $ hg up -qC 0
673 $ hg up -qC 0
674 $ hg tag -m 'retag rev 0' -fr 0 bar # rev 4 bar -> 0, but bar stays at 2
674 $ hg tag -m 'retag rev 0' -fr 0 bar # rev 4 bar -> 0, but bar stays at 2
675
675
676 Bar should still point to rev 2:
676 Bar should still point to rev 2:
677
677
678 $ hg tags
678 $ hg tags
679 tip 4:3b4b14ed0202
679 tip 4:3b4b14ed0202
680 bar 2:6fa450212aeb
680 bar 2:6fa450212aeb
681
681
682 Test that global/local tag removal does not get confused when trying
682 Test that global/local tag removal does not get confused when trying
683 to remove a tag of type X which actually only exists as type Y:
683 to remove a tag of type X which actually only exists as type Y:
684
684
685 $ cd ..
685 $ cd ..
686 $ hg init t5
686 $ hg init t5
687 $ cd t5
687 $ cd t5
688 $ echo foo > foo
688 $ echo foo > foo
689 $ hg add
689 $ hg add
690 adding foo
690 adding foo
691 $ hg ci -m 'add foo' # rev 0
691 $ hg ci -m 'add foo' # rev 0
692
692
693 $ hg tag -r 0 -l localtag
693 $ hg tag -r 0 -l localtag
694 $ hg tag --remove localtag
694 $ hg tag --remove localtag
695 abort: tag 'localtag' is not a global tag
695 abort: tag 'localtag' is not a global tag
696 [10]
696 [10]
697 $
697 $
698 $ hg tag -r 0 globaltag
698 $ hg tag -r 0 globaltag
699 $ hg tag --remove -l globaltag
699 $ hg tag --remove -l globaltag
700 abort: tag 'globaltag' is not a local tag
700 abort: tag 'globaltag' is not a local tag
701 [10]
701 [10]
702 $ hg tags -v
702 $ hg tags -v
703 tip 1:a0b6fe111088
703 tip 1:a0b6fe111088
704 localtag 0:bbd179dfa0a7 local
704 localtag 0:bbd179dfa0a7 local
705 globaltag 0:bbd179dfa0a7
705 globaltag 0:bbd179dfa0a7
706
706
707 Templated output:
707 Templated output:
708
708
709 (immediate values)
709 (immediate values)
710
710
711 $ hg tags -T '{pad(tag, 9)} {rev}:{node} ({type})\n'
711 $ hg tags -T '{pad(tag, 9)} {rev}:{node} ({type})\n'
712 tip 1:a0b6fe111088c8c29567d3876cc466aa02927cae ()
712 tip 1:a0b6fe111088c8c29567d3876cc466aa02927cae ()
713 localtag 0:bbd179dfa0a71671c253b3ae0aa1513b60d199fa (local)
713 localtag 0:bbd179dfa0a71671c253b3ae0aa1513b60d199fa (local)
714 globaltag 0:bbd179dfa0a71671c253b3ae0aa1513b60d199fa ()
714 globaltag 0:bbd179dfa0a71671c253b3ae0aa1513b60d199fa ()
715
715
716 (ctx/revcache dependent)
716 (ctx/revcache dependent)
717
717
718 $ hg tags -T '{pad(tag, 9)} {rev} {file_adds}\n'
718 $ hg tags -T '{pad(tag, 9)} {rev} {file_adds}\n'
719 tip 1 .hgtags
719 tip 1 .hgtags
720 localtag 0 foo
720 localtag 0 foo
721 globaltag 0 foo
721 globaltag 0 foo
722
722
723 $ hg tags -T '{pad(tag, 9)} {rev}:{node|shortest}\n'
723 $ hg tags -T '{pad(tag, 9)} {rev}:{node|shortest}\n'
724 tip 1:a0b6
724 tip 1:a0b6
725 localtag 0:bbd1
725 localtag 0:bbd1
726 globaltag 0:bbd1
726 globaltag 0:bbd1
727
727
728 Test for issue3911
728 Test for issue3911
729
729
730 $ hg tag -r 0 -l localtag2
730 $ hg tag -r 0 -l localtag2
731 $ hg tag -l --remove localtag2
731 $ hg tag -l --remove localtag2
732 $ hg tags -v
732 $ hg tags -v
733 tip 1:a0b6fe111088
733 tip 1:a0b6fe111088
734 localtag 0:bbd179dfa0a7 local
734 localtag 0:bbd179dfa0a7 local
735 globaltag 0:bbd179dfa0a7
735 globaltag 0:bbd179dfa0a7
736
736
737 $ hg tag -r 1 -f localtag
737 $ hg tag -r 1 -f localtag
738 $ hg tags -v
738 $ hg tags -v
739 tip 2:5c70a037bb37
739 tip 2:5c70a037bb37
740 localtag 1:a0b6fe111088
740 localtag 1:a0b6fe111088
741 globaltag 0:bbd179dfa0a7
741 globaltag 0:bbd179dfa0a7
742
742
743 $ hg tags -v
743 $ hg tags -v
744 tip 2:5c70a037bb37
744 tip 2:5c70a037bb37
745 localtag 1:a0b6fe111088
745 localtag 1:a0b6fe111088
746 globaltag 0:bbd179dfa0a7
746 globaltag 0:bbd179dfa0a7
747
747
748 $ hg tag -r 1 localtag2
748 $ hg tag -r 1 localtag2
749 $ hg tags -v
749 $ hg tags -v
750 tip 3:bbfb8cd42be2
750 tip 3:bbfb8cd42be2
751 localtag2 1:a0b6fe111088
751 localtag2 1:a0b6fe111088
752 localtag 1:a0b6fe111088
752 localtag 1:a0b6fe111088
753 globaltag 0:bbd179dfa0a7
753 globaltag 0:bbd179dfa0a7
754
754
755 $ hg tags -v
755 $ hg tags -v
756 tip 3:bbfb8cd42be2
756 tip 3:bbfb8cd42be2
757 localtag2 1:a0b6fe111088
757 localtag2 1:a0b6fe111088
758 localtag 1:a0b6fe111088
758 localtag 1:a0b6fe111088
759 globaltag 0:bbd179dfa0a7
759 globaltag 0:bbd179dfa0a7
760
760
761 $ cd ..
761 $ cd ..
762
762
763 Create a repository with tags data to test .hgtags fnodes transfer
763 Create a repository with tags data to test .hgtags fnodes transfer
764
764
765 $ hg init tagsserver
765 $ hg init tagsserver
766 $ cd tagsserver
766 $ cd tagsserver
767 $ touch foo
767 $ touch foo
768 $ hg -q commit -A -m initial
768 $ hg -q commit -A -m initial
769 $ hg tag -m 'tag 0.1' 0.1
769 $ hg tag -m 'tag 0.1' 0.1
770 $ echo second > foo
770 $ echo second > foo
771 $ hg commit -m second
771 $ hg commit -m second
772 $ hg tag -m 'tag 0.2' 0.2
772 $ hg tag -m 'tag 0.2' 0.2
773 $ hg tags
773 $ hg tags
774 tip 3:40f0358cb314
774 tip 3:40f0358cb314
775 0.2 2:f63cc8fe54e4
775 0.2 2:f63cc8fe54e4
776 0.1 0:96ee1d7354c4
776 0.1 0:96ee1d7354c4
777 $ cd ..
777 $ cd ..
778
778
779 Cloning should pull down hgtags fnodes mappings and write the cache file
779 Cloning should pull down hgtags fnodes mappings and write the cache file
780
780
781 $ hg clone --pull tagsserver tagsclient
781 $ hg clone --pull tagsserver tagsclient
782 requesting all changes
782 requesting all changes
783 adding changesets
783 adding changesets
784 adding manifests
784 adding manifests
785 adding file changes
785 adding file changes
786 added 4 changesets with 4 changes to 2 files
786 added 4 changesets with 4 changes to 2 files
787 new changesets 96ee1d7354c4:40f0358cb314
787 new changesets 96ee1d7354c4:40f0358cb314
788 updating to branch default
788 updating to branch default
789 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
789 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
790
790
791 The missing tags2* files mean the cache wasn't written through the normal mechanism.
791 The missing tags2* files mean the cache wasn't written through the normal mechanism.
792
792
793 $ ls tagsclient/.hg/cache
793 $ ls tagsclient/.hg/cache
794 branch2-base
794 branch2-base
795 branch2-immutable
795 branch2-immutable
796 branch2-served
796 branch2-served
797 branch2-served.hidden
797 branch2-served.hidden
798 branch2-visible
798 branch2-visible
799 branch2-visible-hidden
799 branch2-visible-hidden
800 hgtagsfnodes1
800 hgtagsfnodes1
801 rbc-names-v1
801 rbc-names-v1
802 rbc-revs-v1
802 rbc-revs-v1
803 tags2
803 tags2
804 tags2-served
804 tags2-served
805
805
806 Cache should contain the head only, even though other nodes have tags data
806 Cache should contain the head only, even though other nodes have tags data
807
807
808 $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
808 $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
809 tagsclient/.hg/cache/hgtagsfnodes1: size=96
809 tagsclient/.hg/cache/hgtagsfnodes1: size=96
810 0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
810 0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
811 0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
811 0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
812 0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
812 0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
813 0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
813 0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
814 0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
814 0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
815 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
815 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
816
816
817 Running hg tags should produce a tags2* file and not change the cache
817 Running hg tags should produce a tags2* file and not change the cache
818
818
819 $ hg -R tagsclient tags
819 $ hg -R tagsclient tags
820 tip 3:40f0358cb314
820 tip 3:40f0358cb314
821 0.2 2:f63cc8fe54e4
821 0.2 2:f63cc8fe54e4
822 0.1 0:96ee1d7354c4
822 0.1 0:96ee1d7354c4
823
823
824 $ ls tagsclient/.hg/cache
824 $ ls tagsclient/.hg/cache
825 branch2-base
825 branch2-base
826 branch2-immutable
826 branch2-immutable
827 branch2-served
827 branch2-served
828 branch2-served.hidden
828 branch2-served.hidden
829 branch2-visible
829 branch2-visible
830 branch2-visible-hidden
830 branch2-visible-hidden
831 hgtagsfnodes1
831 hgtagsfnodes1
832 rbc-names-v1
832 rbc-names-v1
833 rbc-revs-v1
833 rbc-revs-v1
834 tags2
834 tags2
835 tags2-served
835 tags2-served
836 tags2-visible
836 tags2-visible
837
837
838 $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
838 $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
839 tagsclient/.hg/cache/hgtagsfnodes1: size=96
839 tagsclient/.hg/cache/hgtagsfnodes1: size=96
840 0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
840 0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
841 0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
841 0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
842 0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
842 0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
843 0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
843 0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
844 0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
844 0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
845 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
845 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
846
846
847 Check that the bundle includes cache data
847 Check that the bundle includes cache data
848
848
849 $ hg -R tagsclient bundle --all ./test-cache-in-bundle-all-rev.hg
849 $ hg -R tagsclient bundle --all ./test-cache-in-bundle-all-rev.hg
850 4 changesets found
850 4 changesets found
851 $ hg debugbundle ./test-cache-in-bundle-all-rev.hg
851 $ hg debugbundle ./test-cache-in-bundle-all-rev.hg
852 Stream params: {Compression: BZ}
852 Stream params: {Compression: BZ}
853 changegroup -- {nbchanges: 4, version: 02} (mandatory: True)
853 changegroup -- {nbchanges: 4, version: 02} (mandatory: True)
854 96ee1d7354c4ad7372047672c36a1f561e3a6a4c
854 96ee1d7354c4ad7372047672c36a1f561e3a6a4c
855 c4dab0c2fd337eb9191f80c3024830a4889a8f34
855 c4dab0c2fd337eb9191f80c3024830a4889a8f34
856 f63cc8fe54e4d326f8d692805d70e092f851ddb1
856 f63cc8fe54e4d326f8d692805d70e092f851ddb1
857 40f0358cb314c824a5929ee527308d90e023bc10
857 40f0358cb314c824a5929ee527308d90e023bc10
858 hgtagsfnodes -- {} (mandatory: True)
858 hgtagsfnodes -- {} (mandatory: True)
859 cache:rev-branch-cache -- {} (mandatory: False)
859 cache:rev-branch-cache -- {} (mandatory: False)
860
860
861 Check that local clone includes cache data
861 Check that local clone includes cache data
862
862
863 $ hg clone tagsclient tags-local-clone
863 $ hg clone tagsclient tags-local-clone
864 updating to branch default
864 updating to branch default
865 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
865 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
866 $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
866 $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
867 tags2
867 tags2
868 tags2-served
868 tags2-served
869 tags2-visible
869 tags2-visible
870
870
871 Avoid creating a new changeset when trying to delete an already deleted tag
871 Avoid creating a new changeset when trying to delete an already deleted tag
872 $ hg init issue5752
872 $ hg init issue5752
873 $ cd issue5752
873 $ cd issue5752
874 $ echo > a
874 $ echo > a
875 $ hg commit -Am 'add a'
875 $ hg commit -Am 'add a'
876 adding a
876 adding a
877 $ hg tag a
877 $ hg tag a
878 $ hg tags
878 $ hg tags
879 tip 1:bd7ee4f3939b
879 tip 1:bd7ee4f3939b
880 a 0:a8a82d372bb3
880 a 0:a8a82d372bb3
881 $ hg log
881 $ hg log
882 changeset: 1:bd7ee4f3939b
882 changeset: 1:bd7ee4f3939b
883 tag: tip
883 tag: tip
884 user: test
884 user: test
885 date: Thu Jan 01 00:00:00 1970 +0000
885 date: Thu Jan 01 00:00:00 1970 +0000
886 summary: Added tag a for changeset a8a82d372bb3
886 summary: Added tag a for changeset a8a82d372bb3
887
887
888 changeset: 0:a8a82d372bb3
888 changeset: 0:a8a82d372bb3
889 tag: a
889 tag: a
890 user: test
890 user: test
891 date: Thu Jan 01 00:00:00 1970 +0000
891 date: Thu Jan 01 00:00:00 1970 +0000
892 summary: add a
892 summary: add a
893
893
894 $ hg tag --remove a
894 $ hg tag --remove a
895 $ hg log
895 $ hg log
896 changeset: 2:e7feacc7ec9e
896 changeset: 2:e7feacc7ec9e
897 tag: tip
897 tag: tip
898 user: test
898 user: test
899 date: Thu Jan 01 00:00:00 1970 +0000
899 date: Thu Jan 01 00:00:00 1970 +0000
900 summary: Removed tag a
900 summary: Removed tag a
901
901
902 changeset: 1:bd7ee4f3939b
902 changeset: 1:bd7ee4f3939b
903 user: test
903 user: test
904 date: Thu Jan 01 00:00:00 1970 +0000
904 date: Thu Jan 01 00:00:00 1970 +0000
905 summary: Added tag a for changeset a8a82d372bb3
905 summary: Added tag a for changeset a8a82d372bb3
906
906
907 changeset: 0:a8a82d372bb3
907 changeset: 0:a8a82d372bb3
908 user: test
908 user: test
909 date: Thu Jan 01 00:00:00 1970 +0000
909 date: Thu Jan 01 00:00:00 1970 +0000
910 summary: add a
910 summary: add a
911
911
912 $ hg tag --remove a
912 $ hg tag --remove a
913 abort: tag 'a' is already removed
913 abort: tag 'a' is already removed
914 [10]
914 [10]
915 $ hg log
915 $ hg log
916 changeset: 2:e7feacc7ec9e
916 changeset: 2:e7feacc7ec9e
917 tag: tip
917 tag: tip
918 user: test
918 user: test
919 date: Thu Jan 01 00:00:00 1970 +0000
919 date: Thu Jan 01 00:00:00 1970 +0000
920 summary: Removed tag a
920 summary: Removed tag a
921
921
922 changeset: 1:bd7ee4f3939b
922 changeset: 1:bd7ee4f3939b
923 user: test
923 user: test
924 date: Thu Jan 01 00:00:00 1970 +0000
924 date: Thu Jan 01 00:00:00 1970 +0000
925 summary: Added tag a for changeset a8a82d372bb3
925 summary: Added tag a for changeset a8a82d372bb3
926
926
927 changeset: 0:a8a82d372bb3
927 changeset: 0:a8a82d372bb3
928 user: test
928 user: test
929 date: Thu Jan 01 00:00:00 1970 +0000
929 date: Thu Jan 01 00:00:00 1970 +0000
930 summary: add a
930 summary: add a
931
931
932 $ cat .hgtags
932 $ cat .hgtags
933 a8a82d372bb35b42ff736e74f07c23bcd99c371f a
933 a8a82d372bb35b42ff736e74f07c23bcd99c371f a
934 a8a82d372bb35b42ff736e74f07c23bcd99c371f a
934 a8a82d372bb35b42ff736e74f07c23bcd99c371f a
935 0000000000000000000000000000000000000000 a
935 0000000000000000000000000000000000000000 a