debugdeltachain: protect against 0 readsize...
Boris Feld
r38669:0f4c2c70 default
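The change itself is small: when a delta chain requires no data to be read from disk (for instance when every revision in the chain is empty), readsize can be 0 and the read-density division raises ZeroDivisionError. A minimal before/after sketch of the guarded computation, using the names that appear in the diff below:

    # before: divides unconditionally and fails when readsize is 0
    readdensity = float(chainsize) / float(readsize)

    # after: fall back to a density of 1 for an empty read
    if readsize:
        readdensity = float(chainsize) / float(readsize)
    else:
        readdensity = 1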
@@ -1,3173 +1,3176 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 fileset,
51 fileset,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
92 release = lockmod.release
92 release = lockmod.release
93
93
94 command = registrar.command()
94 command = registrar.command()
95
95
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 def debugancestor(ui, repo, *args):
97 def debugancestor(ui, repo, *args):
98 """find the ancestor revision of two revisions in a given index"""
98 """find the ancestor revision of two revisions in a given index"""
99 if len(args) == 3:
99 if len(args) == 3:
100 index, rev1, rev2 = args
100 index, rev1, rev2 = args
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 lookup = r.lookup
102 lookup = r.lookup
103 elif len(args) == 2:
103 elif len(args) == 2:
104 if not repo:
104 if not repo:
105 raise error.Abort(_('there is no Mercurial repository here '
105 raise error.Abort(_('there is no Mercurial repository here '
106 '(.hg not found)'))
106 '(.hg not found)'))
107 rev1, rev2 = args
107 rev1, rev2 = args
108 r = repo.changelog
108 r = repo.changelog
109 lookup = repo.lookup
109 lookup = repo.lookup
110 else:
110 else:
111 raise error.Abort(_('either two or three arguments required'))
111 raise error.Abort(_('either two or three arguments required'))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114
114
115 @command('debugapplystreamclonebundle', [], 'FILE')
115 @command('debugapplystreamclonebundle', [], 'FILE')
116 def debugapplystreamclonebundle(ui, repo, fname):
116 def debugapplystreamclonebundle(ui, repo, fname):
117 """apply a stream clone bundle file"""
117 """apply a stream clone bundle file"""
118 f = hg.openpath(ui, fname)
118 f = hg.openpath(ui, fname)
119 gen = exchange.readbundle(ui, f, fname)
119 gen = exchange.readbundle(ui, f, fname)
120 gen.apply(repo)
120 gen.apply(repo)
121
121
122 @command('debugbuilddag',
122 @command('debugbuilddag',
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 ('n', 'new-file', None, _('add new file at each rev'))],
125 ('n', 'new-file', None, _('add new file at each rev'))],
126 _('[OPTION]... [TEXT]'))
126 _('[OPTION]... [TEXT]'))
127 def debugbuilddag(ui, repo, text=None,
127 def debugbuilddag(ui, repo, text=None,
128 mergeable_file=False,
128 mergeable_file=False,
129 overwritten_file=False,
129 overwritten_file=False,
130 new_file=False):
130 new_file=False):
131 """builds a repo with a given DAG from scratch in the current empty repo
131 """builds a repo with a given DAG from scratch in the current empty repo
132
132
133 The description of the DAG is read from stdin if not given on the
133 The description of the DAG is read from stdin if not given on the
134 command line.
134 command line.
135
135
136 Elements:
136 Elements:
137
137
138 - "+n" is a linear run of n nodes based on the current default parent
138 - "+n" is a linear run of n nodes based on the current default parent
139 - "." is a single node based on the current default parent
139 - "." is a single node based on the current default parent
140 - "$" resets the default parent to null (implied at the start);
140 - "$" resets the default parent to null (implied at the start);
141 otherwise the default parent is always the last node created
141 otherwise the default parent is always the last node created
142 - "<p" sets the default parent to the backref p
142 - "<p" sets the default parent to the backref p
143 - "*p" is a fork at parent p, which is a backref
143 - "*p" is a fork at parent p, which is a backref
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 - "/p2" is a merge of the preceding node and p2
145 - "/p2" is a merge of the preceding node and p2
146 - ":tag" defines a local tag for the preceding node
146 - ":tag" defines a local tag for the preceding node
147 - "@branch" sets the named branch for subsequent nodes
147 - "@branch" sets the named branch for subsequent nodes
148 - "#...\\n" is a comment up to the end of the line
148 - "#...\\n" is a comment up to the end of the line
149
149
150 Whitespace between the above elements is ignored.
150 Whitespace between the above elements is ignored.
151
151
152 A backref is either
152 A backref is either
153
153
154 - a number n, which references the node curr-n, where curr is the current
154 - a number n, which references the node curr-n, where curr is the current
155 node, or
155 node, or
156 - the name of a local tag you placed earlier using ":tag", or
156 - the name of a local tag you placed earlier using ":tag", or
157 - empty to denote the default parent.
157 - empty to denote the default parent.
158
158
159 All string valued-elements are either strictly alphanumeric, or must
159 All string valued-elements are either strictly alphanumeric, or must
160 be enclosed in double quotes ("..."), with "\\" as escape character.
160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 """
161 """
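An illustrative DAG text for the elements described above (a made-up example, not part of this changeset):

    hg debugbuilddag '+3:mainline $ +2:feature *mainline /feature'

This builds a three-node trunk tagged "mainline", resets the parent and builds a separate two-node root tagged "feature", forks one more node off the mainline tip, and finally merges that fork with the "feature" head. As the code below shows, the command aborts unless the repository is empty.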
162
162
163 if text is None:
163 if text is None:
164 ui.status(_("reading DAG from stdin\n"))
164 ui.status(_("reading DAG from stdin\n"))
165 text = ui.fin.read()
165 text = ui.fin.read()
166
166
167 cl = repo.changelog
167 cl = repo.changelog
168 if len(cl) > 0:
168 if len(cl) > 0:
169 raise error.Abort(_('repository is not empty'))
169 raise error.Abort(_('repository is not empty'))
170
170
171 # determine number of revs in DAG
171 # determine number of revs in DAG
172 total = 0
172 total = 0
173 for type, data in dagparser.parsedag(text):
173 for type, data in dagparser.parsedag(text):
174 if type == 'n':
174 if type == 'n':
175 total += 1
175 total += 1
176
176
177 if mergeable_file:
177 if mergeable_file:
178 linesperrev = 2
178 linesperrev = 2
179 # make a file with k lines per rev
179 # make a file with k lines per rev
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 initialmergedlines.append("")
181 initialmergedlines.append("")
182
182
183 tags = []
183 tags = []
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
185 total=total)
185 total=total)
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
187 at = -1
187 at = -1
188 atbranch = 'default'
188 atbranch = 'default'
189 nodeids = []
189 nodeids = []
190 id = 0
190 id = 0
191 progress.update(id)
191 progress.update(id)
192 for type, data in dagparser.parsedag(text):
192 for type, data in dagparser.parsedag(text):
193 if type == 'n':
193 if type == 'n':
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
195 id, ps = data
195 id, ps = data
196
196
197 files = []
197 files = []
198 filecontent = {}
198 filecontent = {}
199
199
200 p2 = None
200 p2 = None
201 if mergeable_file:
201 if mergeable_file:
202 fn = "mf"
202 fn = "mf"
203 p1 = repo[ps[0]]
203 p1 = repo[ps[0]]
204 if len(ps) > 1:
204 if len(ps) > 1:
205 p2 = repo[ps[1]]
205 p2 = repo[ps[1]]
206 pa = p1.ancestor(p2)
206 pa = p1.ancestor(p2)
207 base, local, other = [x[fn].data() for x in (pa, p1,
207 base, local, other = [x[fn].data() for x in (pa, p1,
208 p2)]
208 p2)]
209 m3 = simplemerge.Merge3Text(base, local, other)
209 m3 = simplemerge.Merge3Text(base, local, other)
210 ml = [l.strip() for l in m3.merge_lines()]
210 ml = [l.strip() for l in m3.merge_lines()]
211 ml.append("")
211 ml.append("")
212 elif at > 0:
212 elif at > 0:
213 ml = p1[fn].data().split("\n")
213 ml = p1[fn].data().split("\n")
214 else:
214 else:
215 ml = initialmergedlines
215 ml = initialmergedlines
216 ml[id * linesperrev] += " r%i" % id
216 ml[id * linesperrev] += " r%i" % id
217 mergedtext = "\n".join(ml)
217 mergedtext = "\n".join(ml)
218 files.append(fn)
218 files.append(fn)
219 filecontent[fn] = mergedtext
219 filecontent[fn] = mergedtext
220
220
221 if overwritten_file:
221 if overwritten_file:
222 fn = "of"
222 fn = "of"
223 files.append(fn)
223 files.append(fn)
224 filecontent[fn] = "r%i\n" % id
224 filecontent[fn] = "r%i\n" % id
225
225
226 if new_file:
226 if new_file:
227 fn = "nf%i" % id
227 fn = "nf%i" % id
228 files.append(fn)
228 files.append(fn)
229 filecontent[fn] = "r%i\n" % id
229 filecontent[fn] = "r%i\n" % id
230 if len(ps) > 1:
230 if len(ps) > 1:
231 if not p2:
231 if not p2:
232 p2 = repo[ps[1]]
232 p2 = repo[ps[1]]
233 for fn in p2:
233 for fn in p2:
234 if fn.startswith("nf"):
234 if fn.startswith("nf"):
235 files.append(fn)
235 files.append(fn)
236 filecontent[fn] = p2[fn].data()
236 filecontent[fn] = p2[fn].data()
237
237
238 def fctxfn(repo, cx, path):
238 def fctxfn(repo, cx, path):
239 if path in filecontent:
239 if path in filecontent:
240 return context.memfilectx(repo, cx, path,
240 return context.memfilectx(repo, cx, path,
241 filecontent[path])
241 filecontent[path])
242 return None
242 return None
243
243
244 if len(ps) == 0 or ps[0] < 0:
244 if len(ps) == 0 or ps[0] < 0:
245 pars = [None, None]
245 pars = [None, None]
246 elif len(ps) == 1:
246 elif len(ps) == 1:
247 pars = [nodeids[ps[0]], None]
247 pars = [nodeids[ps[0]], None]
248 else:
248 else:
249 pars = [nodeids[p] for p in ps]
249 pars = [nodeids[p] for p in ps]
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
251 date=(id, 0),
251 date=(id, 0),
252 user="debugbuilddag",
252 user="debugbuilddag",
253 extra={'branch': atbranch})
253 extra={'branch': atbranch})
254 nodeid = repo.commitctx(cx)
254 nodeid = repo.commitctx(cx)
255 nodeids.append(nodeid)
255 nodeids.append(nodeid)
256 at = id
256 at = id
257 elif type == 'l':
257 elif type == 'l':
258 id, name = data
258 id, name = data
259 ui.note(('tag %s\n' % name))
259 ui.note(('tag %s\n' % name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
261 elif type == 'a':
261 elif type == 'a':
262 ui.note(('branch %s\n' % data))
262 ui.note(('branch %s\n' % data))
263 atbranch = data
263 atbranch = data
264 progress.update(id)
264 progress.update(id)
265
265
266 if tags:
266 if tags:
267 repo.vfs.write("localtags", "".join(tags))
267 repo.vfs.write("localtags", "".join(tags))
268
268
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
270 indent_string = ' ' * indent
270 indent_string = ' ' * indent
271 if all:
271 if all:
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
273 % indent_string)
273 % indent_string)
274
274
275 def showchunks(named):
275 def showchunks(named):
276 ui.write("\n%s%s\n" % (indent_string, named))
276 ui.write("\n%s%s\n" % (indent_string, named))
277 for deltadata in gen.deltaiter():
277 for deltadata in gen.deltaiter():
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
279 ui.write("%s%s %s %s %s %s %d\n" %
279 ui.write("%s%s %s %s %s %s %d\n" %
280 (indent_string, hex(node), hex(p1), hex(p2),
280 (indent_string, hex(node), hex(p1), hex(p2),
281 hex(cs), hex(deltabase), len(delta)))
281 hex(cs), hex(deltabase), len(delta)))
282
282
283 chunkdata = gen.changelogheader()
283 chunkdata = gen.changelogheader()
284 showchunks("changelog")
284 showchunks("changelog")
285 chunkdata = gen.manifestheader()
285 chunkdata = gen.manifestheader()
286 showchunks("manifest")
286 showchunks("manifest")
287 for chunkdata in iter(gen.filelogheader, {}):
287 for chunkdata in iter(gen.filelogheader, {}):
288 fname = chunkdata['filename']
288 fname = chunkdata['filename']
289 showchunks(fname)
289 showchunks(fname)
290 else:
290 else:
291 if isinstance(gen, bundle2.unbundle20):
291 if isinstance(gen, bundle2.unbundle20):
292 raise error.Abort(_('use debugbundle2 for this file'))
292 raise error.Abort(_('use debugbundle2 for this file'))
293 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
294 for deltadata in gen.deltaiter():
294 for deltadata in gen.deltaiter():
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
296 ui.write("%s%s\n" % (indent_string, hex(node)))
296 ui.write("%s%s\n" % (indent_string, hex(node)))
297
297
298 def _debugobsmarkers(ui, part, indent=0, **opts):
298 def _debugobsmarkers(ui, part, indent=0, **opts):
299 """display version and markers contained in 'data'"""
299 """display version and markers contained in 'data'"""
300 opts = pycompat.byteskwargs(opts)
300 opts = pycompat.byteskwargs(opts)
301 data = part.read()
301 data = part.read()
302 indent_string = ' ' * indent
302 indent_string = ' ' * indent
303 try:
303 try:
304 version, markers = obsolete._readmarkers(data)
304 version, markers = obsolete._readmarkers(data)
305 except error.UnknownVersion as exc:
305 except error.UnknownVersion as exc:
306 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg = "%sunsupported version: %s (%d bytes)\n"
307 msg %= indent_string, exc.version, len(data)
307 msg %= indent_string, exc.version, len(data)
308 ui.write(msg)
308 ui.write(msg)
309 else:
309 else:
310 msg = "%sversion: %d (%d bytes)\n"
310 msg = "%sversion: %d (%d bytes)\n"
311 msg %= indent_string, version, len(data)
311 msg %= indent_string, version, len(data)
312 ui.write(msg)
312 ui.write(msg)
313 fm = ui.formatter('debugobsolete', opts)
313 fm = ui.formatter('debugobsolete', opts)
314 for rawmarker in sorted(markers):
314 for rawmarker in sorted(markers):
315 m = obsutil.marker(None, rawmarker)
315 m = obsutil.marker(None, rawmarker)
316 fm.startitem()
316 fm.startitem()
317 fm.plain(indent_string)
317 fm.plain(indent_string)
318 cmdutil.showmarker(fm, m)
318 cmdutil.showmarker(fm, m)
319 fm.end()
319 fm.end()
320
320
321 def _debugphaseheads(ui, data, indent=0):
321 def _debugphaseheads(ui, data, indent=0):
322 """display version and markers contained in 'data'"""
322 """display version and markers contained in 'data'"""
323 indent_string = ' ' * indent
323 indent_string = ' ' * indent
324 headsbyphase = phases.binarydecode(data)
324 headsbyphase = phases.binarydecode(data)
325 for phase in phases.allphases:
325 for phase in phases.allphases:
326 for head in headsbyphase[phase]:
326 for head in headsbyphase[phase]:
327 ui.write(indent_string)
327 ui.write(indent_string)
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
329
329
330 def _quasirepr(thing):
330 def _quasirepr(thing):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
332 return '{%s}' % (
332 return '{%s}' % (
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
334 return pycompat.bytestr(repr(thing))
334 return pycompat.bytestr(repr(thing))
335
335
336 def _debugbundle2(ui, gen, all=None, **opts):
336 def _debugbundle2(ui, gen, all=None, **opts):
337 """lists the contents of a bundle2"""
337 """lists the contents of a bundle2"""
338 if not isinstance(gen, bundle2.unbundle20):
338 if not isinstance(gen, bundle2.unbundle20):
339 raise error.Abort(_('not a bundle2 file'))
339 raise error.Abort(_('not a bundle2 file'))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
341 parttypes = opts.get(r'part_type', [])
341 parttypes = opts.get(r'part_type', [])
342 for part in gen.iterparts():
342 for part in gen.iterparts():
343 if parttypes and part.type not in parttypes:
343 if parttypes and part.type not in parttypes:
344 continue
344 continue
345 msg = '%s -- %s (mandatory: %r)\n'
345 msg = '%s -- %s (mandatory: %r)\n'
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
347 if part.type == 'changegroup':
347 if part.type == 'changegroup':
348 version = part.params.get('version', '01')
348 version = part.params.get('version', '01')
349 cg = changegroup.getunbundler(version, part, 'UN')
349 cg = changegroup.getunbundler(version, part, 'UN')
350 if not ui.quiet:
350 if not ui.quiet:
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
352 if part.type == 'obsmarkers':
352 if part.type == 'obsmarkers':
353 if not ui.quiet:
353 if not ui.quiet:
354 _debugobsmarkers(ui, part, indent=4, **opts)
354 _debugobsmarkers(ui, part, indent=4, **opts)
355 if part.type == 'phase-heads':
355 if part.type == 'phase-heads':
356 if not ui.quiet:
356 if not ui.quiet:
357 _debugphaseheads(ui, part, indent=4)
357 _debugphaseheads(ui, part, indent=4)
358
358
359 @command('debugbundle',
359 @command('debugbundle',
360 [('a', 'all', None, _('show all details')),
360 [('a', 'all', None, _('show all details')),
361 ('', 'part-type', [], _('show only the named part type')),
361 ('', 'part-type', [], _('show only the named part type')),
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
363 _('FILE'),
363 _('FILE'),
364 norepo=True)
364 norepo=True)
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
366 """lists the contents of a bundle"""
366 """lists the contents of a bundle"""
367 with hg.openpath(ui, bundlepath) as f:
367 with hg.openpath(ui, bundlepath) as f:
368 if spec:
368 if spec:
369 spec = exchange.getbundlespec(ui, f)
369 spec = exchange.getbundlespec(ui, f)
370 ui.write('%s\n' % spec)
370 ui.write('%s\n' % spec)
371 return
371 return
372
372
373 gen = exchange.readbundle(ui, f, bundlepath)
373 gen = exchange.readbundle(ui, f, bundlepath)
374 if isinstance(gen, bundle2.unbundle20):
374 if isinstance(gen, bundle2.unbundle20):
375 return _debugbundle2(ui, gen, all=all, **opts)
375 return _debugbundle2(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
377
377
378 @command('debugcapabilities',
378 @command('debugcapabilities',
379 [], _('PATH'),
379 [], _('PATH'),
380 norepo=True)
380 norepo=True)
381 def debugcapabilities(ui, path, **opts):
381 def debugcapabilities(ui, path, **opts):
382 """lists the capabilities of a remote peer"""
382 """lists the capabilities of a remote peer"""
383 opts = pycompat.byteskwargs(opts)
383 opts = pycompat.byteskwargs(opts)
384 peer = hg.peer(ui, opts, path)
384 peer = hg.peer(ui, opts, path)
385 caps = peer.capabilities()
385 caps = peer.capabilities()
386 ui.write(('Main capabilities:\n'))
386 ui.write(('Main capabilities:\n'))
387 for c in sorted(caps):
387 for c in sorted(caps):
388 ui.write((' %s\n') % c)
388 ui.write((' %s\n') % c)
389 b2caps = bundle2.bundle2caps(peer)
389 b2caps = bundle2.bundle2caps(peer)
390 if b2caps:
390 if b2caps:
391 ui.write(('Bundle2 capabilities:\n'))
391 ui.write(('Bundle2 capabilities:\n'))
392 for key, values in sorted(b2caps.iteritems()):
392 for key, values in sorted(b2caps.iteritems()):
393 ui.write((' %s\n') % key)
393 ui.write((' %s\n') % key)
394 for v in values:
394 for v in values:
395 ui.write((' %s\n') % v)
395 ui.write((' %s\n') % v)
396
396
397 @command('debugcheckstate', [], '')
397 @command('debugcheckstate', [], '')
398 def debugcheckstate(ui, repo):
398 def debugcheckstate(ui, repo):
399 """validate the correctness of the current dirstate"""
399 """validate the correctness of the current dirstate"""
400 parent1, parent2 = repo.dirstate.parents()
400 parent1, parent2 = repo.dirstate.parents()
401 m1 = repo[parent1].manifest()
401 m1 = repo[parent1].manifest()
402 m2 = repo[parent2].manifest()
402 m2 = repo[parent2].manifest()
403 errors = 0
403 errors = 0
404 for f in repo.dirstate:
404 for f in repo.dirstate:
405 state = repo.dirstate[f]
405 state = repo.dirstate[f]
406 if state in "nr" and f not in m1:
406 if state in "nr" and f not in m1:
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
408 errors += 1
408 errors += 1
409 if state in "a" and f in m1:
409 if state in "a" and f in m1:
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
411 errors += 1
411 errors += 1
412 if state in "m" and f not in m1 and f not in m2:
412 if state in "m" and f not in m1 and f not in m2:
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
414 (f, state))
414 (f, state))
415 errors += 1
415 errors += 1
416 for f in m1:
416 for f in m1:
417 state = repo.dirstate[f]
417 state = repo.dirstate[f]
418 if state not in "nrm":
418 if state not in "nrm":
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
420 errors += 1
420 errors += 1
421 if errors:
421 if errors:
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
423 raise error.Abort(error)
423 raise error.Abort(error)
424
424
425 @command('debugcolor',
425 @command('debugcolor',
426 [('', 'style', None, _('show all configured styles'))],
426 [('', 'style', None, _('show all configured styles'))],
427 'hg debugcolor')
427 'hg debugcolor')
428 def debugcolor(ui, repo, **opts):
428 def debugcolor(ui, repo, **opts):
429 """show available color, effects or style"""
429 """show available color, effects or style"""
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
431 if opts.get(r'style'):
431 if opts.get(r'style'):
432 return _debugdisplaystyle(ui)
432 return _debugdisplaystyle(ui)
433 else:
433 else:
434 return _debugdisplaycolor(ui)
434 return _debugdisplaycolor(ui)
435
435
436 def _debugdisplaycolor(ui):
436 def _debugdisplaycolor(ui):
437 ui = ui.copy()
437 ui = ui.copy()
438 ui._styles.clear()
438 ui._styles.clear()
439 for effect in color._activeeffects(ui).keys():
439 for effect in color._activeeffects(ui).keys():
440 ui._styles[effect] = effect
440 ui._styles[effect] = effect
441 if ui._terminfoparams:
441 if ui._terminfoparams:
442 for k, v in ui.configitems('color'):
442 for k, v in ui.configitems('color'):
443 if k.startswith('color.'):
443 if k.startswith('color.'):
444 ui._styles[k] = k[6:]
444 ui._styles[k] = k[6:]
445 elif k.startswith('terminfo.'):
445 elif k.startswith('terminfo.'):
446 ui._styles[k] = k[9:]
446 ui._styles[k] = k[9:]
447 ui.write(_('available colors:\n'))
447 ui.write(_('available colors:\n'))
448 # sort label with a '_' after the other to group '_background' entry.
448 # sort label with a '_' after the other to group '_background' entry.
449 items = sorted(ui._styles.items(),
449 items = sorted(ui._styles.items(),
450 key=lambda i: ('_' in i[0], i[0], i[1]))
450 key=lambda i: ('_' in i[0], i[0], i[1]))
451 for colorname, label in items:
451 for colorname, label in items:
452 ui.write(('%s\n') % colorname, label=label)
452 ui.write(('%s\n') % colorname, label=label)
453
453
454 def _debugdisplaystyle(ui):
454 def _debugdisplaystyle(ui):
455 ui.write(_('available style:\n'))
455 ui.write(_('available style:\n'))
456 if not ui._styles:
456 if not ui._styles:
457 return
457 return
458 width = max(len(s) for s in ui._styles)
458 width = max(len(s) for s in ui._styles)
459 for label, effects in sorted(ui._styles.items()):
459 for label, effects in sorted(ui._styles.items()):
460 ui.write('%s' % label, label=label)
460 ui.write('%s' % label, label=label)
461 if effects:
461 if effects:
462 # 50
462 # 50
463 ui.write(': ')
463 ui.write(': ')
464 ui.write(' ' * (max(0, width - len(label))))
464 ui.write(' ' * (max(0, width - len(label))))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
466 ui.write('\n')
466 ui.write('\n')
467
467
468 @command('debugcreatestreamclonebundle', [], 'FILE')
468 @command('debugcreatestreamclonebundle', [], 'FILE')
469 def debugcreatestreamclonebundle(ui, repo, fname):
469 def debugcreatestreamclonebundle(ui, repo, fname):
470 """create a stream clone bundle file
470 """create a stream clone bundle file
471
471
472 Stream bundles are special bundles that are essentially archives of
472 Stream bundles are special bundles that are essentially archives of
473 revlog files. They are commonly used for cloning very quickly.
473 revlog files. They are commonly used for cloning very quickly.
474 """
474 """
475 # TODO we may want to turn this into an abort when this functionality
475 # TODO we may want to turn this into an abort when this functionality
476 # is moved into `hg bundle`.
476 # is moved into `hg bundle`.
477 if phases.hassecret(repo):
477 if phases.hassecret(repo):
478 ui.warn(_('(warning: stream clone bundle will contain secret '
478 ui.warn(_('(warning: stream clone bundle will contain secret '
479 'revisions)\n'))
479 'revisions)\n'))
480
480
481 requirements, gen = streamclone.generatebundlev1(repo)
481 requirements, gen = streamclone.generatebundlev1(repo)
482 changegroup.writechunks(ui, gen, fname)
482 changegroup.writechunks(ui, gen, fname)
483
483
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
485
485
486 @command('debugdag',
486 @command('debugdag',
487 [('t', 'tags', None, _('use tags as labels')),
487 [('t', 'tags', None, _('use tags as labels')),
488 ('b', 'branches', None, _('annotate with branch names')),
488 ('b', 'branches', None, _('annotate with branch names')),
489 ('', 'dots', None, _('use dots for runs')),
489 ('', 'dots', None, _('use dots for runs')),
490 ('s', 'spaces', None, _('separate elements by spaces'))],
490 ('s', 'spaces', None, _('separate elements by spaces'))],
491 _('[OPTION]... [FILE [REV]...]'),
491 _('[OPTION]... [FILE [REV]...]'),
492 optionalrepo=True)
492 optionalrepo=True)
493 def debugdag(ui, repo, file_=None, *revs, **opts):
493 def debugdag(ui, repo, file_=None, *revs, **opts):
494 """format the changelog or an index DAG as a concise textual description
494 """format the changelog or an index DAG as a concise textual description
495
495
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
497 revision numbers, they get labeled in the output as rN.
497 revision numbers, they get labeled in the output as rN.
498
498
499 Otherwise, the changelog DAG of the current repo is emitted.
499 Otherwise, the changelog DAG of the current repo is emitted.
500 """
500 """
501 spaces = opts.get(r'spaces')
501 spaces = opts.get(r'spaces')
502 dots = opts.get(r'dots')
502 dots = opts.get(r'dots')
503 if file_:
503 if file_:
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
505 file_)
505 file_)
506 revs = set((int(r) for r in revs))
506 revs = set((int(r) for r in revs))
507 def events():
507 def events():
508 for r in rlog:
508 for r in rlog:
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
510 if p != -1))
510 if p != -1))
511 if r in revs:
511 if r in revs:
512 yield 'l', (r, "r%i" % r)
512 yield 'l', (r, "r%i" % r)
513 elif repo:
513 elif repo:
514 cl = repo.changelog
514 cl = repo.changelog
515 tags = opts.get(r'tags')
515 tags = opts.get(r'tags')
516 branches = opts.get(r'branches')
516 branches = opts.get(r'branches')
517 if tags:
517 if tags:
518 labels = {}
518 labels = {}
519 for l, n in repo.tags().items():
519 for l, n in repo.tags().items():
520 labels.setdefault(cl.rev(n), []).append(l)
520 labels.setdefault(cl.rev(n), []).append(l)
521 def events():
521 def events():
522 b = "default"
522 b = "default"
523 for r in cl:
523 for r in cl:
524 if branches:
524 if branches:
525 newb = cl.read(cl.node(r))[5]['branch']
525 newb = cl.read(cl.node(r))[5]['branch']
526 if newb != b:
526 if newb != b:
527 yield 'a', newb
527 yield 'a', newb
528 b = newb
528 b = newb
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
530 if p != -1))
530 if p != -1))
531 if tags:
531 if tags:
532 ls = labels.get(r)
532 ls = labels.get(r)
533 if ls:
533 if ls:
534 for l in ls:
534 for l in ls:
535 yield 'l', (r, l)
535 yield 'l', (r, l)
536 else:
536 else:
537 raise error.Abort(_('need repo for changelog dag'))
537 raise error.Abort(_('need repo for changelog dag'))
538
538
539 for line in dagparser.dagtextlines(events(),
539 for line in dagparser.dagtextlines(events(),
540 addspaces=spaces,
540 addspaces=spaces,
541 wraplabels=True,
541 wraplabels=True,
542 wrapannotations=True,
542 wrapannotations=True,
543 wrapnonlinear=dots,
543 wrapnonlinear=dots,
544 usedots=dots,
544 usedots=dots,
545 maxlinewidth=70):
545 maxlinewidth=70):
546 ui.write(line)
546 ui.write(line)
547 ui.write("\n")
547 ui.write("\n")
548
548
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
550 def debugdata(ui, repo, file_, rev=None, **opts):
550 def debugdata(ui, repo, file_, rev=None, **opts):
551 """dump the contents of a data file revision"""
551 """dump the contents of a data file revision"""
552 opts = pycompat.byteskwargs(opts)
552 opts = pycompat.byteskwargs(opts)
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
554 if rev is not None:
554 if rev is not None:
555 raise error.CommandError('debugdata', _('invalid arguments'))
555 raise error.CommandError('debugdata', _('invalid arguments'))
556 file_, rev = None, file_
556 file_, rev = None, file_
557 elif rev is None:
557 elif rev is None:
558 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
560 try:
560 try:
561 ui.write(r.revision(r.lookup(rev), raw=True))
561 ui.write(r.revision(r.lookup(rev), raw=True))
562 except KeyError:
562 except KeyError:
563 raise error.Abort(_('invalid revision identifier %s') % rev)
563 raise error.Abort(_('invalid revision identifier %s') % rev)
564
564
565 @command('debugdate',
565 @command('debugdate',
566 [('e', 'extended', None, _('try extended date formats'))],
566 [('e', 'extended', None, _('try extended date formats'))],
567 _('[-e] DATE [RANGE]'),
567 _('[-e] DATE [RANGE]'),
568 norepo=True, optionalrepo=True)
568 norepo=True, optionalrepo=True)
569 def debugdate(ui, date, range=None, **opts):
569 def debugdate(ui, date, range=None, **opts):
570 """parse and display a date"""
570 """parse and display a date"""
571 if opts[r"extended"]:
571 if opts[r"extended"]:
572 d = dateutil.parsedate(date, util.extendeddateformats)
572 d = dateutil.parsedate(date, util.extendeddateformats)
573 else:
573 else:
574 d = dateutil.parsedate(date)
574 d = dateutil.parsedate(date)
575 ui.write(("internal: %d %d\n") % d)
575 ui.write(("internal: %d %d\n") % d)
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
577 if range:
577 if range:
578 m = dateutil.matchdate(range)
578 m = dateutil.matchdate(range)
579 ui.write(("match: %s\n") % m(d[0]))
579 ui.write(("match: %s\n") % m(d[0]))
580
580
581 @command('debugdeltachain',
581 @command('debugdeltachain',
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
583 _('-c|-m|FILE'),
583 _('-c|-m|FILE'),
584 optionalrepo=True)
584 optionalrepo=True)
585 def debugdeltachain(ui, repo, file_=None, **opts):
585 def debugdeltachain(ui, repo, file_=None, **opts):
586 """dump information about delta chains in a revlog
586 """dump information about delta chains in a revlog
587
587
588 Output can be templatized. Available template keywords are:
588 Output can be templatized. Available template keywords are:
589
589
590 :``rev``: revision number
590 :``rev``: revision number
591 :``chainid``: delta chain identifier (numbered by unique base)
591 :``chainid``: delta chain identifier (numbered by unique base)
592 :``chainlen``: delta chain length to this revision
592 :``chainlen``: delta chain length to this revision
593 :``prevrev``: previous revision in delta chain
593 :``prevrev``: previous revision in delta chain
594 :``deltatype``: role of delta / how it was computed
594 :``deltatype``: role of delta / how it was computed
595 :``compsize``: compressed size of revision
595 :``compsize``: compressed size of revision
596 :``uncompsize``: uncompressed size of revision
596 :``uncompsize``: uncompressed size of revision
597 :``chainsize``: total size of compressed revisions in chain
597 :``chainsize``: total size of compressed revisions in chain
598 :``chainratio``: total chain size divided by uncompressed revision size
598 :``chainratio``: total chain size divided by uncompressed revision size
599 (new delta chains typically start at ratio 2.00)
599 (new delta chains typically start at ratio 2.00)
600 :``lindist``: linear distance from base revision in delta chain to end
600 :``lindist``: linear distance from base revision in delta chain to end
601 of this revision
601 of this revision
602 :``extradist``: total size of revisions not part of this delta chain from
602 :``extradist``: total size of revisions not part of this delta chain from
603 base of delta chain to end of this revision; a measurement
603 base of delta chain to end of this revision; a measurement
604 of how much extra data we need to read/seek across to read
604 of how much extra data we need to read/seek across to read
605 the delta chain for this revision
605 the delta chain for this revision
606 :``extraratio``: extradist divided by chainsize; another representation of
606 :``extraratio``: extradist divided by chainsize; another representation of
607 how much unrelated data is needed to load this delta chain
607 how much unrelated data is needed to load this delta chain
608
608
609 If the repository is configured to use the sparse read, additional keywords
609 If the repository is configured to use the sparse read, additional keywords
610 are available:
610 are available:
611
611
612 :``readsize``: total size of data read from the disk for a revision
612 :``readsize``: total size of data read from the disk for a revision
613 (sum of the sizes of all the blocks)
613 (sum of the sizes of all the blocks)
614 :``largestblock``: size of the largest block of data read from the disk
614 :``largestblock``: size of the largest block of data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
616 :``srchunks``: in how many data hunks the whole revision would be read
616 :``srchunks``: in how many data hunks the whole revision would be read
617
617
618 The sparse read can be enabled with experimental.sparse-read = True
618 The sparse read can be enabled with experimental.sparse-read = True
619 """
619 """
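An illustrative invocation that prints a few of the keywords above for the manifest revlog (the template is chosen for the example, not taken from the changeset):

    hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype} {chainratio}\n'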
620 opts = pycompat.byteskwargs(opts)
620 opts = pycompat.byteskwargs(opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
622 index = r.index
622 index = r.index
623 start = r.start
623 start = r.start
624 length = r.length
624 length = r.length
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
626 withsparseread = getattr(r, '_withsparseread', False)
626 withsparseread = getattr(r, '_withsparseread', False)
627
627
628 def revinfo(rev):
628 def revinfo(rev):
629 e = index[rev]
629 e = index[rev]
630 compsize = e[1]
630 compsize = e[1]
631 uncompsize = e[2]
631 uncompsize = e[2]
632 chainsize = 0
632 chainsize = 0
633
633
634 if generaldelta:
634 if generaldelta:
635 if e[3] == e[5]:
635 if e[3] == e[5]:
636 deltatype = 'p1'
636 deltatype = 'p1'
637 elif e[3] == e[6]:
637 elif e[3] == e[6]:
638 deltatype = 'p2'
638 deltatype = 'p2'
639 elif e[3] == rev - 1:
639 elif e[3] == rev - 1:
640 deltatype = 'prev'
640 deltatype = 'prev'
641 elif e[3] == rev:
641 elif e[3] == rev:
642 deltatype = 'base'
642 deltatype = 'base'
643 else:
643 else:
644 deltatype = 'other'
644 deltatype = 'other'
645 else:
645 else:
646 if e[3] == rev:
646 if e[3] == rev:
647 deltatype = 'base'
647 deltatype = 'base'
648 else:
648 else:
649 deltatype = 'prev'
649 deltatype = 'prev'
650
650
651 chain = r._deltachain(rev)[0]
651 chain = r._deltachain(rev)[0]
652 for iterrev in chain:
652 for iterrev in chain:
653 e = index[iterrev]
653 e = index[iterrev]
654 chainsize += e[1]
654 chainsize += e[1]
655
655
656 return compsize, uncompsize, deltatype, chain, chainsize
656 return compsize, uncompsize, deltatype, chain, chainsize
657
657
658 fm = ui.formatter('debugdeltachain', opts)
658 fm = ui.formatter('debugdeltachain', opts)
659
659
660 fm.plain(' rev chain# chainlen prev delta '
660 fm.plain(' rev chain# chainlen prev delta '
661 'size rawsize chainsize ratio lindist extradist '
661 'size rawsize chainsize ratio lindist extradist '
662 'extraratio')
662 'extraratio')
663 if withsparseread:
663 if withsparseread:
664 fm.plain(' readsize largestblk rddensity srchunks')
664 fm.plain(' readsize largestblk rddensity srchunks')
665 fm.plain('\n')
665 fm.plain('\n')
666
666
667 chainbases = {}
667 chainbases = {}
668 for rev in r:
668 for rev in r:
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
670 chainbase = chain[0]
670 chainbase = chain[0]
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
672 basestart = start(chainbase)
672 basestart = start(chainbase)
673 revstart = start(rev)
673 revstart = start(rev)
674 lineardist = revstart + comp - basestart
674 lineardist = revstart + comp - basestart
675 extradist = lineardist - chainsize
675 extradist = lineardist - chainsize
676 try:
676 try:
677 prevrev = chain[-2]
677 prevrev = chain[-2]
678 except IndexError:
678 except IndexError:
679 prevrev = -1
679 prevrev = -1
680
680
681 if uncomp != 0:
681 if uncomp != 0:
682 chainratio = float(chainsize) / float(uncomp)
682 chainratio = float(chainsize) / float(uncomp)
683 else:
683 else:
684 chainratio = chainsize
684 chainratio = chainsize
685
685
686 if chainsize != 0:
686 if chainsize != 0:
687 extraratio = float(extradist) / float(chainsize)
687 extraratio = float(extradist) / float(chainsize)
688 else:
688 else:
689 extraratio = extradist
689 extraratio = extradist
690
690
691 fm.startitem()
691 fm.startitem()
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
692 fm.write('rev chainid chainlen prevrev deltatype compsize '
693 'uncompsize chainsize chainratio lindist extradist '
693 'uncompsize chainsize chainratio lindist extradist '
694 'extraratio',
694 'extraratio',
695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
695 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
696 rev, chainid, len(chain), prevrev, deltatype, comp,
696 rev, chainid, len(chain), prevrev, deltatype, comp,
697 uncomp, chainsize, chainratio, lineardist, extradist,
697 uncomp, chainsize, chainratio, lineardist, extradist,
698 extraratio,
698 extraratio,
699 rev=rev, chainid=chainid, chainlen=len(chain),
699 rev=rev, chainid=chainid, chainlen=len(chain),
700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
700 prevrev=prevrev, deltatype=deltatype, compsize=comp,
701 uncompsize=uncomp, chainsize=chainsize,
701 uncompsize=uncomp, chainsize=chainsize,
702 chainratio=chainratio, lindist=lineardist,
702 chainratio=chainratio, lindist=lineardist,
703 extradist=extradist, extraratio=extraratio)
703 extradist=extradist, extraratio=extraratio)
704 if withsparseread:
704 if withsparseread:
705 readsize = 0
705 readsize = 0
706 largestblock = 0
706 largestblock = 0
707 srchunks = 0
707 srchunks = 0
708
708
709 for revschunk in revlog._slicechunk(r, chain):
709 for revschunk in revlog._slicechunk(r, chain):
710 srchunks += 1
710 srchunks += 1
711 blkend = start(revschunk[-1]) + length(revschunk[-1])
711 blkend = start(revschunk[-1]) + length(revschunk[-1])
712 blksize = blkend - start(revschunk[0])
712 blksize = blkend - start(revschunk[0])
713
713
714 readsize += blksize
714 readsize += blksize
715 if largestblock < blksize:
715 if largestblock < blksize:
716 largestblock = blksize
716 largestblock = blksize
717
717
718 readdensity = float(chainsize) / float(readsize)
718 if readsize:
719 readdensity = float(chainsize) / float(readsize)
720 else:
721 readdensity = 1
719
722
720 fm.write('readsize largestblock readdensity srchunks',
723 fm.write('readsize largestblock readdensity srchunks',
721 ' %10d %10d %9.5f %8d',
724 ' %10d %10d %9.5f %8d',
722 readsize, largestblock, readdensity, srchunks,
725 readsize, largestblock, readdensity, srchunks,
723 readsize=readsize, largestblock=largestblock,
726 readsize=readsize, largestblock=largestblock,
724 readdensity=readdensity, srchunks=srchunks)
727 readdensity=readdensity, srchunks=srchunks)
725
728
726 fm.plain('\n')
729 fm.plain('\n')
727
730
728 fm.end()
731 fm.end()
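A quick worked example of the sparse-read columns, with made-up numbers: a chain whose compressed revisions add up to chainsize = 2000 bytes, but whose enclosing blocks force readsize = 8000 bytes spread over srchunks = 2 hunks (largest block 5000 bytes), gets readdensity = 2000 / 8000 = 0.25. With the guard added in this changeset, a chain with readsize = 0 now reports a density of 1 instead of failing on a division by zero.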
729
732
730 @command('debugdirstate|debugstate',
733 @command('debugdirstate|debugstate',
731 [('', 'nodates', None, _('do not display the saved mtime')),
734 [('', 'nodates', None, _('do not display the saved mtime')),
732 ('', 'datesort', None, _('sort by saved mtime'))],
735 ('', 'datesort', None, _('sort by saved mtime'))],
733 _('[OPTION]...'))
736 _('[OPTION]...'))
734 def debugstate(ui, repo, **opts):
737 def debugstate(ui, repo, **opts):
735 """show the contents of the current dirstate"""
738 """show the contents of the current dirstate"""
736
739
737 nodates = opts.get(r'nodates')
740 nodates = opts.get(r'nodates')
738 datesort = opts.get(r'datesort')
741 datesort = opts.get(r'datesort')
739
742
740 timestr = ""
743 timestr = ""
741 if datesort:
744 if datesort:
742 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
745 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
743 else:
746 else:
744 keyfunc = None # sort by filename
747 keyfunc = None # sort by filename
745 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
748 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
746 if ent[3] == -1:
749 if ent[3] == -1:
747 timestr = 'unset '
750 timestr = 'unset '
748 elif nodates:
751 elif nodates:
749 timestr = 'set '
752 timestr = 'set '
750 else:
753 else:
751 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
754 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
752 time.localtime(ent[3]))
755 time.localtime(ent[3]))
753 timestr = encoding.strtolocal(timestr)
756 timestr = encoding.strtolocal(timestr)
754 if ent[1] & 0o20000:
757 if ent[1] & 0o20000:
755 mode = 'lnk'
758 mode = 'lnk'
756 else:
759 else:
757 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
760 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
758 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
761 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
759 for f in repo.dirstate.copies():
762 for f in repo.dirstate.copies():
760 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
763 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
761
764
762 @command('debugdiscovery',
765 @command('debugdiscovery',
763 [('', 'old', None, _('use old-style discovery')),
766 [('', 'old', None, _('use old-style discovery')),
764 ('', 'nonheads', None,
767 ('', 'nonheads', None,
765 _('use old-style discovery with non-heads included')),
768 _('use old-style discovery with non-heads included')),
766 ('', 'rev', [], 'restrict discovery to this set of revs'),
769 ('', 'rev', [], 'restrict discovery to this set of revs'),
767 ] + cmdutil.remoteopts,
770 ] + cmdutil.remoteopts,
768 _('[--rev REV] [OTHER]'))
771 _('[--rev REV] [OTHER]'))
769 def debugdiscovery(ui, repo, remoteurl="default", **opts):
772 def debugdiscovery(ui, repo, remoteurl="default", **opts):
770 """runs the changeset discovery protocol in isolation"""
773 """runs the changeset discovery protocol in isolation"""
771 opts = pycompat.byteskwargs(opts)
774 opts = pycompat.byteskwargs(opts)
772 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
775 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
773 remote = hg.peer(repo, opts, remoteurl)
776 remote = hg.peer(repo, opts, remoteurl)
774 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
777 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
775
778
776 # make sure tests are repeatable
779 # make sure tests are repeatable
777 random.seed(12323)
780 random.seed(12323)
778
781
779 def doit(pushedrevs, remoteheads, remote=remote):
782 def doit(pushedrevs, remoteheads, remote=remote):
780 if opts.get('old'):
783 if opts.get('old'):
781 if not util.safehasattr(remote, 'branches'):
784 if not util.safehasattr(remote, 'branches'):
782 # enable in-client legacy support
785 # enable in-client legacy support
783 remote = localrepo.locallegacypeer(remote.local())
786 remote = localrepo.locallegacypeer(remote.local())
784 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
787 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
785 force=True)
788 force=True)
786 common = set(common)
789 common = set(common)
787 if not opts.get('nonheads'):
790 if not opts.get('nonheads'):
788 ui.write(("unpruned common: %s\n") %
791 ui.write(("unpruned common: %s\n") %
789 " ".join(sorted(short(n) for n in common)))
792 " ".join(sorted(short(n) for n in common)))
790 dag = dagutil.revlogdag(repo.changelog)
793 dag = dagutil.revlogdag(repo.changelog)
791 all = dag.ancestorset(dag.internalizeall(common))
794 all = dag.ancestorset(dag.internalizeall(common))
792 common = dag.externalizeall(dag.headsetofconnecteds(all))
795 common = dag.externalizeall(dag.headsetofconnecteds(all))
793 else:
796 else:
794 nodes = None
797 nodes = None
795 if pushedrevs:
798 if pushedrevs:
796 revs = scmutil.revrange(repo, pushedrevs)
799 revs = scmutil.revrange(repo, pushedrevs)
797 nodes = [repo[r].node() for r in revs]
800 nodes = [repo[r].node() for r in revs]
798 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
801 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
799 ancestorsof=nodes)
802 ancestorsof=nodes)
800 common = set(common)
803 common = set(common)
801 rheads = set(hds)
804 rheads = set(hds)
802 lheads = set(repo.heads())
805 lheads = set(repo.heads())
803 ui.write(("common heads: %s\n") %
806 ui.write(("common heads: %s\n") %
804 " ".join(sorted(short(n) for n in common)))
807 " ".join(sorted(short(n) for n in common)))
805 if lheads <= common:
808 if lheads <= common:
806 ui.write(("local is subset\n"))
809 ui.write(("local is subset\n"))
807 elif rheads <= common:
810 elif rheads <= common:
808 ui.write(("remote is subset\n"))
811 ui.write(("remote is subset\n"))
809
812
810 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
813 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
811 localrevs = opts['rev']
814 localrevs = opts['rev']
812 doit(localrevs, remoterevs)
815 doit(localrevs, remoterevs)
813
816
814 _chunksize = 4 << 10
817 _chunksize = 4 << 10
815
818
816 @command('debugdownload',
819 @command('debugdownload',
817 [
820 [
818 ('o', 'output', '', _('path')),
821 ('o', 'output', '', _('path')),
819 ],
822 ],
820 optionalrepo=True)
823 optionalrepo=True)
821 def debugdownload(ui, repo, url, output=None, **opts):
824 def debugdownload(ui, repo, url, output=None, **opts):
822 """download a resource using Mercurial logic and config
825 """download a resource using Mercurial logic and config
823 """
826 """
824 fh = urlmod.open(ui, url, output)
827 fh = urlmod.open(ui, url, output)
825
828
826 dest = ui
829 dest = ui
827 if output:
830 if output:
828 dest = open(output, "wb", _chunksize)
831 dest = open(output, "wb", _chunksize)
829 try:
832 try:
830 data = fh.read(_chunksize)
833 data = fh.read(_chunksize)
831 while data:
834 while data:
832 dest.write(data)
835 dest.write(data)
833 data = fh.read(_chunksize)
836 data = fh.read(_chunksize)
834 finally:
837 finally:
835 if output:
838 if output:
836 dest.close()
839 dest.close()
837
840
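An illustrative use of the command defined above (URL and output path are made up):

    hg debugdownload -o bundle.hg https://example.com/some-bundle.hg

Without -o the downloaded bytes are written to the ui output; with -o they are streamed to the named file in _chunksize (4 KiB) blocks.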
838 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
841 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
839 def debugextensions(ui, repo, **opts):
842 def debugextensions(ui, repo, **opts):
840 '''show information about active extensions'''
843 '''show information about active extensions'''
841 opts = pycompat.byteskwargs(opts)
844 opts = pycompat.byteskwargs(opts)
842 exts = extensions.extensions(ui)
845 exts = extensions.extensions(ui)
843 hgver = util.version()
846 hgver = util.version()
844 fm = ui.formatter('debugextensions', opts)
847 fm = ui.formatter('debugextensions', opts)
845 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
848 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
846 isinternal = extensions.ismoduleinternal(extmod)
849 isinternal = extensions.ismoduleinternal(extmod)
847 extsource = pycompat.fsencode(extmod.__file__)
850 extsource = pycompat.fsencode(extmod.__file__)
848 if isinternal:
851 if isinternal:
849 exttestedwith = [] # never expose magic string to users
852 exttestedwith = [] # never expose magic string to users
850 else:
853 else:
851 exttestedwith = getattr(extmod, 'testedwith', '').split()
854 exttestedwith = getattr(extmod, 'testedwith', '').split()
852 extbuglink = getattr(extmod, 'buglink', None)
855 extbuglink = getattr(extmod, 'buglink', None)
853
856
854 fm.startitem()
857 fm.startitem()
855
858
856 if ui.quiet or ui.verbose:
859 if ui.quiet or ui.verbose:
857 fm.write('name', '%s\n', extname)
860 fm.write('name', '%s\n', extname)
858 else:
861 else:
859 fm.write('name', '%s', extname)
862 fm.write('name', '%s', extname)
860 if isinternal or hgver in exttestedwith:
863 if isinternal or hgver in exttestedwith:
861 fm.plain('\n')
864 fm.plain('\n')
862 elif not exttestedwith:
865 elif not exttestedwith:
863 fm.plain(_(' (untested!)\n'))
866 fm.plain(_(' (untested!)\n'))
864 else:
867 else:
865 lasttestedversion = exttestedwith[-1]
868 lasttestedversion = exttestedwith[-1]
866 fm.plain(' (%s!)\n' % lasttestedversion)
869 fm.plain(' (%s!)\n' % lasttestedversion)
867
870
868 fm.condwrite(ui.verbose and extsource, 'source',
871 fm.condwrite(ui.verbose and extsource, 'source',
869 _(' location: %s\n'), extsource or "")
872 _(' location: %s\n'), extsource or "")
870
873
871 if ui.verbose:
874 if ui.verbose:
872 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
875 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
873 fm.data(bundled=isinternal)
876 fm.data(bundled=isinternal)
874
877
875 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
878 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
876 _(' tested with: %s\n'),
879 _(' tested with: %s\n'),
877 fm.formatlist(exttestedwith, name='ver'))
880 fm.formatlist(exttestedwith, name='ver'))
878
881
879 fm.condwrite(ui.verbose and extbuglink, 'buglink',
882 fm.condwrite(ui.verbose and extbuglink, 'buglink',
880 _(' bug reporting: %s\n'), extbuglink or "")
883 _(' bug reporting: %s\n'), extbuglink or "")
881
884
882 fm.end()
885 fm.end()
883
886
884 @command('debugfileset',
887 @command('debugfileset',
885 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
888 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
886 ('', 'all-files', False,
889 ('', 'all-files', False,
887 _('test files from all revisions and working directory'))],
890 _('test files from all revisions and working directory'))],
888 _('[-r REV] [--all-files] FILESPEC'))
891 _('[-r REV] [--all-files] FILESPEC'))
889 def debugfileset(ui, repo, expr, **opts):
892 def debugfileset(ui, repo, expr, **opts):
890 '''parse and apply a fileset specification'''
893 '''parse and apply a fileset specification'''
891 opts = pycompat.byteskwargs(opts)
894 opts = pycompat.byteskwargs(opts)
892 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
895 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
893 if ui.verbose:
896 if ui.verbose:
894 tree = fileset.parse(expr)
897 tree = fileset.parse(expr)
895 ui.note(fileset.prettyformat(tree), "\n")
898 ui.note(fileset.prettyformat(tree), "\n")
896
899
897 files = set()
900 files = set()
898 if opts['all_files']:
901 if opts['all_files']:
899 for r in repo:
902 for r in repo:
900 c = repo[r]
903 c = repo[r]
901 files.update(c.files())
904 files.update(c.files())
902 files.update(c.substate)
905 files.update(c.substate)
903 if opts['all_files'] or ctx.rev() is None:
906 if opts['all_files'] or ctx.rev() is None:
904 wctx = repo[None]
907 wctx = repo[None]
905 files.update(repo.dirstate.walk(scmutil.matchall(repo),
908 files.update(repo.dirstate.walk(scmutil.matchall(repo),
906 subrepos=list(wctx.substate),
909 subrepos=list(wctx.substate),
907 unknown=True, ignored=True))
910 unknown=True, ignored=True))
908 files.update(wctx.substate)
911 files.update(wctx.substate)
909 else:
912 else:
910 files.update(ctx.files())
913 files.update(ctx.files())
911 files.update(ctx.substate)
914 files.update(ctx.substate)
912
915
913 m = ctx.matchfileset(expr)
916 m = ctx.matchfileset(expr)
914 for f in sorted(files):
917 for f in sorted(files):
915 if not m(f):
918 if not m(f):
916 continue
919 continue
917 ui.write("%s\n" % f)
920 ui.write("%s\n" % f)
918
921
919 @command('debugformat',
922 @command('debugformat',
920 [] + cmdutil.formatteropts,
923 [] + cmdutil.formatteropts,
921 _(''))
924 _(''))
922 def debugformat(ui, repo, **opts):
925 def debugformat(ui, repo, **opts):
923 """display format information about the current repository
926 """display format information about the current repository
924
927
925 Use --verbose to get extra information about current config value and
928 Use --verbose to get extra information about current config value and
926 Mercurial default."""
929 Mercurial default."""
927 opts = pycompat.byteskwargs(opts)
930 opts = pycompat.byteskwargs(opts)
928 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
931 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
929 maxvariantlength = max(len('format-variant'), maxvariantlength)
932 maxvariantlength = max(len('format-variant'), maxvariantlength)
930
933
931 def makeformatname(name):
934 def makeformatname(name):
932 return '%s:' + (' ' * (maxvariantlength - len(name)))
935 return '%s:' + (' ' * (maxvariantlength - len(name)))
933
936
934 fm = ui.formatter('debugformat', opts)
937 fm = ui.formatter('debugformat', opts)
935 if fm.isplain():
938 if fm.isplain():
936 def formatvalue(value):
939 def formatvalue(value):
937 if util.safehasattr(value, 'startswith'):
940 if util.safehasattr(value, 'startswith'):
938 return value
941 return value
939 if value:
942 if value:
940 return 'yes'
943 return 'yes'
941 else:
944 else:
942 return 'no'
945 return 'no'
943 else:
946 else:
944 formatvalue = pycompat.identity
947 formatvalue = pycompat.identity
945
948
946 fm.plain('format-variant')
949 fm.plain('format-variant')
947 fm.plain(' ' * (maxvariantlength - len('format-variant')))
950 fm.plain(' ' * (maxvariantlength - len('format-variant')))
948 fm.plain(' repo')
951 fm.plain(' repo')
949 if ui.verbose:
952 if ui.verbose:
950 fm.plain(' config default')
953 fm.plain(' config default')
951 fm.plain('\n')
954 fm.plain('\n')
952 for fv in upgrade.allformatvariant:
955 for fv in upgrade.allformatvariant:
953 fm.startitem()
956 fm.startitem()
954 repovalue = fv.fromrepo(repo)
957 repovalue = fv.fromrepo(repo)
955 configvalue = fv.fromconfig(repo)
958 configvalue = fv.fromconfig(repo)
956
959
957 if repovalue != configvalue:
960 if repovalue != configvalue:
958 namelabel = 'formatvariant.name.mismatchconfig'
961 namelabel = 'formatvariant.name.mismatchconfig'
959 repolabel = 'formatvariant.repo.mismatchconfig'
962 repolabel = 'formatvariant.repo.mismatchconfig'
960 elif repovalue != fv.default:
963 elif repovalue != fv.default:
961 namelabel = 'formatvariant.name.mismatchdefault'
964 namelabel = 'formatvariant.name.mismatchdefault'
962 repolabel = 'formatvariant.repo.mismatchdefault'
965 repolabel = 'formatvariant.repo.mismatchdefault'
963 else:
966 else:
964 namelabel = 'formatvariant.name.uptodate'
967 namelabel = 'formatvariant.name.uptodate'
965 repolabel = 'formatvariant.repo.uptodate'
968 repolabel = 'formatvariant.repo.uptodate'
966
969
967 fm.write('name', makeformatname(fv.name), fv.name,
970 fm.write('name', makeformatname(fv.name), fv.name,
968 label=namelabel)
971 label=namelabel)
969 fm.write('repo', ' %3s', formatvalue(repovalue),
972 fm.write('repo', ' %3s', formatvalue(repovalue),
970 label=repolabel)
973 label=repolabel)
971 if fv.default != configvalue:
974 if fv.default != configvalue:
972 configlabel = 'formatvariant.config.special'
975 configlabel = 'formatvariant.config.special'
973 else:
976 else:
974 configlabel = 'formatvariant.config.default'
977 configlabel = 'formatvariant.config.default'
975 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
978 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
976 label=configlabel)
979 label=configlabel)
977 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
980 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
978 label='formatvariant.default')
981 label='formatvariant.default')
979 fm.plain('\n')
982 fm.plain('\n')
980 fm.end()
983 fm.end()
981
984
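# A minimal illustrative sketch (not part of this module; names are
# hypothetical): the plain-text layout built above pads every variant name to
# the widest label so the repo/config/default columns line up. Assuming a
# non-empty list of (name, value) pairs:
def _aligned_rows(variants):
    width = max(len('format-variant'), max(len(name) for name, _ in variants))
    rows = ['format-variant' + ' ' * (width - len('format-variant')) + ' repo']
    for name, value in variants:
        padding = ' ' * (width - len(name))
        rows.append('%s:%s %3s' % (name, padding, 'yes' if value else 'no'))
    return rows
# _aligned_rows([('generaldelta', True)]) gives a header row plus
# 'generaldelta:   yes', padded to the same width as 'format-variant'.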
982 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
985 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
983 def debugfsinfo(ui, path="."):
986 def debugfsinfo(ui, path="."):
984 """show information detected about current filesystem"""
987 """show information detected about current filesystem"""
985 ui.write(('path: %s\n') % path)
988 ui.write(('path: %s\n') % path)
986 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
989 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
987 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
990 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
988 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
991 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
989 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
992 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
990 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
993 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
991 casesensitive = '(unknown)'
994 casesensitive = '(unknown)'
992 try:
995 try:
993 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
996 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
994 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
997 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
995 except OSError:
998 except OSError:
996 pass
999 pass
997 ui.write(('case-sensitive: %s\n') % casesensitive)
1000 ui.write(('case-sensitive: %s\n') % casesensitive)
998
1001
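# A minimal illustrative sketch (not part of this module; the helper is
# hypothetical and uses only the standard library rather than
# util.fscasesensitive): the case-sensitivity probe above creates a temporary
# file and checks whether a differently-cased spelling of its name still
# resolves to an existing entry.
def _probe_case_sensitive(path):
    import os
    import tempfile
    fd, name = tempfile.mkstemp(prefix='.CaseProbe', dir=path)
    try:
        os.close(fd)
        swapped = os.path.join(os.path.dirname(name),
                               os.path.basename(name).swapcase())
        # If the swapped-case name exists, both spellings map to the same
        # entry and the filesystem is case-insensitive.
        return not os.path.exists(swapped)
    finally:
        os.unlink(name)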
999 @command('debuggetbundle',
1002 @command('debuggetbundle',
1000 [('H', 'head', [], _('id of head node'), _('ID')),
1003 [('H', 'head', [], _('id of head node'), _('ID')),
1001 ('C', 'common', [], _('id of common node'), _('ID')),
1004 ('C', 'common', [], _('id of common node'), _('ID')),
1002 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1005 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1003 _('REPO FILE [-H|-C ID]...'),
1006 _('REPO FILE [-H|-C ID]...'),
1004 norepo=True)
1007 norepo=True)
1005 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1008 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1006 """retrieves a bundle from a repo
1009 """retrieves a bundle from a repo
1007
1010
1008 Every ID must be a full-length hex node id string. Saves the bundle to the
1011 Every ID must be a full-length hex node id string. Saves the bundle to the
1009 given file.
1012 given file.
1010 """
1013 """
1011 opts = pycompat.byteskwargs(opts)
1014 opts = pycompat.byteskwargs(opts)
1012 repo = hg.peer(ui, opts, repopath)
1015 repo = hg.peer(ui, opts, repopath)
1013 if not repo.capable('getbundle'):
1016 if not repo.capable('getbundle'):
1014 raise error.Abort("getbundle() not supported by target repository")
1017 raise error.Abort("getbundle() not supported by target repository")
1015 args = {}
1018 args = {}
1016 if common:
1019 if common:
1017 args[r'common'] = [bin(s) for s in common]
1020 args[r'common'] = [bin(s) for s in common]
1018 if head:
1021 if head:
1019 args[r'heads'] = [bin(s) for s in head]
1022 args[r'heads'] = [bin(s) for s in head]
1020 # TODO: get desired bundlecaps from command line.
1023 # TODO: get desired bundlecaps from command line.
1021 args[r'bundlecaps'] = None
1024 args[r'bundlecaps'] = None
1022 bundle = repo.getbundle('debug', **args)
1025 bundle = repo.getbundle('debug', **args)
1023
1026
1024 bundletype = opts.get('type', 'bzip2').lower()
1027 bundletype = opts.get('type', 'bzip2').lower()
1025 btypes = {'none': 'HG10UN',
1028 btypes = {'none': 'HG10UN',
1026 'bzip2': 'HG10BZ',
1029 'bzip2': 'HG10BZ',
1027 'gzip': 'HG10GZ',
1030 'gzip': 'HG10GZ',
1028 'bundle2': 'HG20'}
1031 'bundle2': 'HG20'}
1029 bundletype = btypes.get(bundletype)
1032 bundletype = btypes.get(bundletype)
1030 if bundletype not in bundle2.bundletypes:
1033 if bundletype not in bundle2.bundletypes:
1031 raise error.Abort(_('unknown bundle type specified with --type'))
1034 raise error.Abort(_('unknown bundle type specified with --type'))
1032 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1035 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1033
1036
1034 @command('debugignore', [], '[FILE]')
1037 @command('debugignore', [], '[FILE]')
1035 def debugignore(ui, repo, *files, **opts):
1038 def debugignore(ui, repo, *files, **opts):
1036 """display the combined ignore pattern and information about ignored files
1039 """display the combined ignore pattern and information about ignored files
1037
1040
1038 With no argument display the combined ignore pattern.
1041 With no argument display the combined ignore pattern.
1039
1042
1040 Given space-separated file names, shows if the given file is ignored and
1043 Given space-separated file names, shows if the given file is ignored and
1041 if so, shows the ignore rule (file and line number) that matched it.
1044 if so, shows the ignore rule (file and line number) that matched it.
1042 """
1045 """
1043 ignore = repo.dirstate._ignore
1046 ignore = repo.dirstate._ignore
1044 if not files:
1047 if not files:
1045 # Show all the patterns
1048 # Show all the patterns
1046 ui.write("%s\n" % pycompat.byterepr(ignore))
1049 ui.write("%s\n" % pycompat.byterepr(ignore))
1047 else:
1050 else:
1048 m = scmutil.match(repo[None], pats=files)
1051 m = scmutil.match(repo[None], pats=files)
1049 for f in m.files():
1052 for f in m.files():
1050 nf = util.normpath(f)
1053 nf = util.normpath(f)
1051 ignored = None
1054 ignored = None
1052 ignoredata = None
1055 ignoredata = None
1053 if nf != '.':
1056 if nf != '.':
1054 if ignore(nf):
1057 if ignore(nf):
1055 ignored = nf
1058 ignored = nf
1056 ignoredata = repo.dirstate._ignorefileandline(nf)
1059 ignoredata = repo.dirstate._ignorefileandline(nf)
1057 else:
1060 else:
1058 for p in util.finddirs(nf):
1061 for p in util.finddirs(nf):
1059 if ignore(p):
1062 if ignore(p):
1060 ignored = p
1063 ignored = p
1061 ignoredata = repo.dirstate._ignorefileandline(p)
1064 ignoredata = repo.dirstate._ignorefileandline(p)
1062 break
1065 break
1063 if ignored:
1066 if ignored:
1064 if ignored == nf:
1067 if ignored == nf:
1065 ui.write(_("%s is ignored\n") % m.uipath(f))
1068 ui.write(_("%s is ignored\n") % m.uipath(f))
1066 else:
1069 else:
1067 ui.write(_("%s is ignored because of "
1070 ui.write(_("%s is ignored because of "
1068 "containing folder %s\n")
1071 "containing folder %s\n")
1069 % (m.uipath(f), ignored))
1072 % (m.uipath(f), ignored))
1070 ignorefile, lineno, line = ignoredata
1073 ignorefile, lineno, line = ignoredata
1071 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1074 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1072 % (ignorefile, lineno, line))
1075 % (ignorefile, lineno, line))
1073 else:
1076 else:
1074 ui.write(_("%s is not ignored\n") % m.uipath(f))
1077 ui.write(_("%s is not ignored\n") % m.uipath(f))
1075
1078
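# A minimal illustrative sketch (not part of this module; the generator is a
# hypothetical stand-in for util.finddirs): the fallback above walks every
# ancestor directory of the path, deepest first, until an ignore rule matches,
# which is how the "containing folder" named in the message is found.
def _ancestor_dirs(path):
    # 'a/b/c.txt' yields 'a/b', then 'a'.
    pos = path.rfind('/')
    while pos != -1:
        yield path[:pos]
        pos = path.rfind('/', 0, pos)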
1076 @command('debugindex', cmdutil.debugrevlogopts +
1079 @command('debugindex', cmdutil.debugrevlogopts +
1077 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1080 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1078 _('[-f FORMAT] -c|-m|FILE'),
1081 _('[-f FORMAT] -c|-m|FILE'),
1079 optionalrepo=True)
1082 optionalrepo=True)
1080 def debugindex(ui, repo, file_=None, **opts):
1083 def debugindex(ui, repo, file_=None, **opts):
1081 """dump the contents of an index file"""
1084 """dump the contents of an index file"""
1082 opts = pycompat.byteskwargs(opts)
1085 opts = pycompat.byteskwargs(opts)
1083 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1086 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1084 format = opts.get('format', 0)
1087 format = opts.get('format', 0)
1085 if format not in (0, 1):
1088 if format not in (0, 1):
1086 raise error.Abort(_("unknown format %d") % format)
1089 raise error.Abort(_("unknown format %d") % format)
1087
1090
1088 if ui.debugflag:
1091 if ui.debugflag:
1089 shortfn = hex
1092 shortfn = hex
1090 else:
1093 else:
1091 shortfn = short
1094 shortfn = short
1092
1095
1093 # There might not be anything in r, so have a sane default
1096 # There might not be anything in r, so have a sane default
1094 idlen = 12
1097 idlen = 12
1095 for i in r:
1098 for i in r:
1096 idlen = len(shortfn(r.node(i)))
1099 idlen = len(shortfn(r.node(i)))
1097 break
1100 break
1098
1101
1099 if format == 0:
1102 if format == 0:
1100 if ui.verbose:
1103 if ui.verbose:
1101 ui.write((" rev offset length linkrev"
1104 ui.write((" rev offset length linkrev"
1102 " %s %s p2\n") % ("nodeid".ljust(idlen),
1105 " %s %s p2\n") % ("nodeid".ljust(idlen),
1103 "p1".ljust(idlen)))
1106 "p1".ljust(idlen)))
1104 else:
1107 else:
1105 ui.write((" rev linkrev %s %s p2\n") % (
1108 ui.write((" rev linkrev %s %s p2\n") % (
1106 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1109 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1107 elif format == 1:
1110 elif format == 1:
1108 if ui.verbose:
1111 if ui.verbose:
1109 ui.write((" rev flag offset length size link p1"
1112 ui.write((" rev flag offset length size link p1"
1110 " p2 %s\n") % "nodeid".rjust(idlen))
1113 " p2 %s\n") % "nodeid".rjust(idlen))
1111 else:
1114 else:
1112 ui.write((" rev flag size link p1 p2 %s\n") %
1115 ui.write((" rev flag size link p1 p2 %s\n") %
1113 "nodeid".rjust(idlen))
1116 "nodeid".rjust(idlen))
1114
1117
1115 for i in r:
1118 for i in r:
1116 node = r.node(i)
1119 node = r.node(i)
1117 if format == 0:
1120 if format == 0:
1118 try:
1121 try:
1119 pp = r.parents(node)
1122 pp = r.parents(node)
1120 except Exception:
1123 except Exception:
1121 pp = [nullid, nullid]
1124 pp = [nullid, nullid]
1122 if ui.verbose:
1125 if ui.verbose:
1123 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1126 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1124 i, r.start(i), r.length(i), r.linkrev(i),
1127 i, r.start(i), r.length(i), r.linkrev(i),
1125 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1128 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1126 else:
1129 else:
1127 ui.write("% 6d % 7d %s %s %s\n" % (
1130 ui.write("% 6d % 7d %s %s %s\n" % (
1128 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1131 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1129 shortfn(pp[1])))
1132 shortfn(pp[1])))
1130 elif format == 1:
1133 elif format == 1:
1131 pr = r.parentrevs(i)
1134 pr = r.parentrevs(i)
1132 if ui.verbose:
1135 if ui.verbose:
1133 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1136 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1134 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1137 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1135 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1138 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1136 else:
1139 else:
1137 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1140 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1138 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1141 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1139 shortfn(node)))
1142 shortfn(node)))
1140
1143
1141 @command('debugindexdot', cmdutil.debugrevlogopts,
1144 @command('debugindexdot', cmdutil.debugrevlogopts,
1142 _('-c|-m|FILE'), optionalrepo=True)
1145 _('-c|-m|FILE'), optionalrepo=True)
1143 def debugindexdot(ui, repo, file_=None, **opts):
1146 def debugindexdot(ui, repo, file_=None, **opts):
1144 """dump an index DAG as a graphviz dot file"""
1147 """dump an index DAG as a graphviz dot file"""
1145 opts = pycompat.byteskwargs(opts)
1148 opts = pycompat.byteskwargs(opts)
1146 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1149 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1147 ui.write(("digraph G {\n"))
1150 ui.write(("digraph G {\n"))
1148 for i in r:
1151 for i in r:
1149 node = r.node(i)
1152 node = r.node(i)
1150 pp = r.parents(node)
1153 pp = r.parents(node)
1151 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1154 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1152 if pp[1] != nullid:
1155 if pp[1] != nullid:
1153 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1156 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1154 ui.write("}\n")
1157 ui.write("}\n")
1155
1158
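# A minimal illustrative sketch (not part of this module; names are
# hypothetical): the dot output above is one "parent -> child" edge per
# parent. Given (rev, p1, p2) tuples, with -1 standing for the null revision:
def _dot_edges(parentrevs):
    yield 'digraph G {'
    for rev, p1, p2 in parentrevs:
        # Mirrors the loop above: the first-parent edge is always written,
        # the second-parent edge only when it is not null.
        yield '\t%d -> %d' % (p1, rev)
        if p2 != -1:
            yield '\t%d -> %d' % (p2, rev)
    yield '}'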
1156 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1159 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1157 def debuginstall(ui, **opts):
1160 def debuginstall(ui, **opts):
1158 '''test Mercurial installation
1161 '''test Mercurial installation
1159
1162
1160 Returns 0 on success.
1163 Returns 0 on success.
1161 '''
1164 '''
1162 opts = pycompat.byteskwargs(opts)
1165 opts = pycompat.byteskwargs(opts)
1163
1166
1164 def writetemp(contents):
1167 def writetemp(contents):
1165 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1168 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1166 f = os.fdopen(fd, r"wb")
1169 f = os.fdopen(fd, r"wb")
1167 f.write(contents)
1170 f.write(contents)
1168 f.close()
1171 f.close()
1169 return name
1172 return name
1170
1173
1171 problems = 0
1174 problems = 0
1172
1175
1173 fm = ui.formatter('debuginstall', opts)
1176 fm = ui.formatter('debuginstall', opts)
1174 fm.startitem()
1177 fm.startitem()
1175
1178
1176 # encoding
1179 # encoding
1177 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1180 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1178 err = None
1181 err = None
1179 try:
1182 try:
1180 codecs.lookup(pycompat.sysstr(encoding.encoding))
1183 codecs.lookup(pycompat.sysstr(encoding.encoding))
1181 except LookupError as inst:
1184 except LookupError as inst:
1182 err = stringutil.forcebytestr(inst)
1185 err = stringutil.forcebytestr(inst)
1183 problems += 1
1186 problems += 1
1184 fm.condwrite(err, 'encodingerror', _(" %s\n"
1187 fm.condwrite(err, 'encodingerror', _(" %s\n"
1185 " (check that your locale is properly set)\n"), err)
1188 " (check that your locale is properly set)\n"), err)
1186
1189
1187 # Python
1190 # Python
1188 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1191 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1189 pycompat.sysexecutable)
1192 pycompat.sysexecutable)
1190 fm.write('pythonver', _("checking Python version (%s)\n"),
1193 fm.write('pythonver', _("checking Python version (%s)\n"),
1191 ("%d.%d.%d" % sys.version_info[:3]))
1194 ("%d.%d.%d" % sys.version_info[:3]))
1192 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1195 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1193 os.path.dirname(pycompat.fsencode(os.__file__)))
1196 os.path.dirname(pycompat.fsencode(os.__file__)))
1194
1197
1195 security = set(sslutil.supportedprotocols)
1198 security = set(sslutil.supportedprotocols)
1196 if sslutil.hassni:
1199 if sslutil.hassni:
1197 security.add('sni')
1200 security.add('sni')
1198
1201
1199 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1202 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1200 fm.formatlist(sorted(security), name='protocol',
1203 fm.formatlist(sorted(security), name='protocol',
1201 fmt='%s', sep=','))
1204 fmt='%s', sep=','))
1202
1205
1203 # These are warnings, not errors. So don't increment problem count. This
1206 # These are warnings, not errors. So don't increment problem count. This
1204 # may change in the future.
1207 # may change in the future.
1205 if 'tls1.2' not in security:
1208 if 'tls1.2' not in security:
1206 fm.plain(_(' TLS 1.2 not supported by Python install; '
1209 fm.plain(_(' TLS 1.2 not supported by Python install; '
1207 'network connections lack modern security\n'))
1210 'network connections lack modern security\n'))
1208 if 'sni' not in security:
1211 if 'sni' not in security:
1209 fm.plain(_(' SNI not supported by Python install; may have '
1212 fm.plain(_(' SNI not supported by Python install; may have '
1210 'connectivity issues with some servers\n'))
1213 'connectivity issues with some servers\n'))
1211
1214
1212 # TODO print CA cert info
1215 # TODO print CA cert info
1213
1216
1214 # hg version
1217 # hg version
1215 hgver = util.version()
1218 hgver = util.version()
1216 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1219 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1217 hgver.split('+')[0])
1220 hgver.split('+')[0])
1218 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1221 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1219 '+'.join(hgver.split('+')[1:]))
1222 '+'.join(hgver.split('+')[1:]))
1220
1223
1221 # compiled modules
1224 # compiled modules
1222 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1225 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1223 policy.policy)
1226 policy.policy)
1224 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1227 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1225 os.path.dirname(pycompat.fsencode(__file__)))
1228 os.path.dirname(pycompat.fsencode(__file__)))
1226
1229
1227 if policy.policy in ('c', 'allow'):
1230 if policy.policy in ('c', 'allow'):
1228 err = None
1231 err = None
1229 try:
1232 try:
1230 from .cext import (
1233 from .cext import (
1231 base85,
1234 base85,
1232 bdiff,
1235 bdiff,
1233 mpatch,
1236 mpatch,
1234 osutil,
1237 osutil,
1235 )
1238 )
1236 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1239 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1237 except Exception as inst:
1240 except Exception as inst:
1238 err = stringutil.forcebytestr(inst)
1241 err = stringutil.forcebytestr(inst)
1239 problems += 1
1242 problems += 1
1240 fm.condwrite(err, 'extensionserror', " %s\n", err)
1243 fm.condwrite(err, 'extensionserror', " %s\n", err)
1241
1244
1242 compengines = util.compengines._engines.values()
1245 compengines = util.compengines._engines.values()
1243 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1246 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1244 fm.formatlist(sorted(e.name() for e in compengines),
1247 fm.formatlist(sorted(e.name() for e in compengines),
1245 name='compengine', fmt='%s', sep=', '))
1248 name='compengine', fmt='%s', sep=', '))
1246 fm.write('compenginesavail', _('checking available compression engines '
1249 fm.write('compenginesavail', _('checking available compression engines '
1247 '(%s)\n'),
1250 '(%s)\n'),
1248 fm.formatlist(sorted(e.name() for e in compengines
1251 fm.formatlist(sorted(e.name() for e in compengines
1249 if e.available()),
1252 if e.available()),
1250 name='compengine', fmt='%s', sep=', '))
1253 name='compengine', fmt='%s', sep=', '))
1251 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1254 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1252 fm.write('compenginesserver', _('checking available compression engines '
1255 fm.write('compenginesserver', _('checking available compression engines '
1253 'for wire protocol (%s)\n'),
1256 'for wire protocol (%s)\n'),
1254 fm.formatlist([e.name() for e in wirecompengines
1257 fm.formatlist([e.name() for e in wirecompengines
1255 if e.wireprotosupport()],
1258 if e.wireprotosupport()],
1256 name='compengine', fmt='%s', sep=', '))
1259 name='compengine', fmt='%s', sep=', '))
1257 re2 = 'missing'
1260 re2 = 'missing'
1258 if util._re2:
1261 if util._re2:
1259 re2 = 'available'
1262 re2 = 'available'
1260 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1263 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1261 fm.data(re2=bool(util._re2))
1264 fm.data(re2=bool(util._re2))
1262
1265
1263 # templates
1266 # templates
1264 p = templater.templatepaths()
1267 p = templater.templatepaths()
1265 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1268 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1266 fm.condwrite(not p, '', _(" no template directories found\n"))
1269 fm.condwrite(not p, '', _(" no template directories found\n"))
1267 if p:
1270 if p:
1268 m = templater.templatepath("map-cmdline.default")
1271 m = templater.templatepath("map-cmdline.default")
1269 if m:
1272 if m:
1270 # template found, check if it is working
1273 # template found, check if it is working
1271 err = None
1274 err = None
1272 try:
1275 try:
1273 templater.templater.frommapfile(m)
1276 templater.templater.frommapfile(m)
1274 except Exception as inst:
1277 except Exception as inst:
1275 err = stringutil.forcebytestr(inst)
1278 err = stringutil.forcebytestr(inst)
1276 p = None
1279 p = None
1277 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1280 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1278 else:
1281 else:
1279 p = None
1282 p = None
1280 fm.condwrite(p, 'defaulttemplate',
1283 fm.condwrite(p, 'defaulttemplate',
1281 _("checking default template (%s)\n"), m)
1284 _("checking default template (%s)\n"), m)
1282 fm.condwrite(not m, 'defaulttemplatenotfound',
1285 fm.condwrite(not m, 'defaulttemplatenotfound',
1283 _(" template '%s' not found\n"), "default")
1286 _(" template '%s' not found\n"), "default")
1284 if not p:
1287 if not p:
1285 problems += 1
1288 problems += 1
1286 fm.condwrite(not p, '',
1289 fm.condwrite(not p, '',
1287 _(" (templates seem to have been installed incorrectly)\n"))
1290 _(" (templates seem to have been installed incorrectly)\n"))
1288
1291
1289 # editor
1292 # editor
1290 editor = ui.geteditor()
1293 editor = ui.geteditor()
1291 editor = util.expandpath(editor)
1294 editor = util.expandpath(editor)
1292 editorbin = procutil.shellsplit(editor)[0]
1295 editorbin = procutil.shellsplit(editor)[0]
1293 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1296 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1294 cmdpath = procutil.findexe(editorbin)
1297 cmdpath = procutil.findexe(editorbin)
1295 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1298 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1296 _(" No commit editor set and can't find %s in PATH\n"
1299 _(" No commit editor set and can't find %s in PATH\n"
1297 " (specify a commit editor in your configuration"
1300 " (specify a commit editor in your configuration"
1298 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1301 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1299 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1302 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1300 _(" Can't find editor '%s' in PATH\n"
1303 _(" Can't find editor '%s' in PATH\n"
1301 " (specify a commit editor in your configuration"
1304 " (specify a commit editor in your configuration"
1302 " file)\n"), not cmdpath and editorbin)
1305 " file)\n"), not cmdpath and editorbin)
1303 if not cmdpath and editor != 'vi':
1306 if not cmdpath and editor != 'vi':
1304 problems += 1
1307 problems += 1
1305
1308
1306 # check username
1309 # check username
1307 username = None
1310 username = None
1308 err = None
1311 err = None
1309 try:
1312 try:
1310 username = ui.username()
1313 username = ui.username()
1311 except error.Abort as e:
1314 except error.Abort as e:
1312 err = stringutil.forcebytestr(e)
1315 err = stringutil.forcebytestr(e)
1313 problems += 1
1316 problems += 1
1314
1317
1315 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1318 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1316 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1319 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1317 " (specify a username in your configuration file)\n"), err)
1320 " (specify a username in your configuration file)\n"), err)
1318
1321
1319 fm.condwrite(not problems, '',
1322 fm.condwrite(not problems, '',
1320 _("no problems detected\n"))
1323 _("no problems detected\n"))
1321 if not problems:
1324 if not problems:
1322 fm.data(problems=problems)
1325 fm.data(problems=problems)
1323 fm.condwrite(problems, 'problems',
1326 fm.condwrite(problems, 'problems',
1324 _("%d problems detected,"
1327 _("%d problems detected,"
1325 " please check your install!\n"), problems)
1328 " please check your install!\n"), problems)
1326 fm.end()
1329 fm.end()
1327
1330
1328 return problems
1331 return problems
1329
1332
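# A minimal illustrative sketch (not part of this module; the helper name is
# hypothetical): the "checking encoding" step above amounts to a single
# codecs registry lookup, reporting the error text when the configured
# encoding name is unusable.
def _check_encoding(name):
    import codecs
    try:
        codecs.lookup(name)
        return None
    except LookupError as inst:
        # This is the text reported next to the encoding check, together with
        # a hint to check the locale settings.
        return str(inst)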
1330 @command('debugknown', [], _('REPO ID...'), norepo=True)
1333 @command('debugknown', [], _('REPO ID...'), norepo=True)
1331 def debugknown(ui, repopath, *ids, **opts):
1334 def debugknown(ui, repopath, *ids, **opts):
1332 """test whether node ids are known to a repo
1335 """test whether node ids are known to a repo
1333
1336
1334 Every ID must be a full-length hex node id string. Returns a list of 0s
1337 Every ID must be a full-length hex node id string. Returns a list of 0s
1335 and 1s indicating unknown/known.
1338 and 1s indicating unknown/known.
1336 """
1339 """
1337 opts = pycompat.byteskwargs(opts)
1340 opts = pycompat.byteskwargs(opts)
1338 repo = hg.peer(ui, opts, repopath)
1341 repo = hg.peer(ui, opts, repopath)
1339 if not repo.capable('known'):
1342 if not repo.capable('known'):
1340 raise error.Abort("known() not supported by target repository")
1343 raise error.Abort("known() not supported by target repository")
1341 flags = repo.known([bin(s) for s in ids])
1344 flags = repo.known([bin(s) for s in ids])
1342 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1345 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1343
1346
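# A minimal illustrative sketch (not part of this module; the helper name is
# hypothetical): the output above is simply the peer's boolean answers joined
# into a string of 0s and 1s, one character per queried node.
def _knownflags(flags):
    # _knownflags([True, False, True]) == '101'
    return ''.join('1' if known else '0' for known in flags)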
1344 @command('debuglabelcomplete', [], _('LABEL...'))
1347 @command('debuglabelcomplete', [], _('LABEL...'))
1345 def debuglabelcomplete(ui, repo, *args):
1348 def debuglabelcomplete(ui, repo, *args):
1346 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1349 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1347 debugnamecomplete(ui, repo, *args)
1350 debugnamecomplete(ui, repo, *args)
1348
1351
1349 @command('debuglocks',
1352 @command('debuglocks',
1350 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1353 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1351 ('W', 'force-wlock', None,
1354 ('W', 'force-wlock', None,
1352 _('free the working state lock (DANGEROUS)')),
1355 _('free the working state lock (DANGEROUS)')),
1353 ('s', 'set-lock', None, _('set the store lock until stopped')),
1356 ('s', 'set-lock', None, _('set the store lock until stopped')),
1354 ('S', 'set-wlock', None,
1357 ('S', 'set-wlock', None,
1355 _('set the working state lock until stopped'))],
1358 _('set the working state lock until stopped'))],
1356 _('[OPTION]...'))
1359 _('[OPTION]...'))
1357 def debuglocks(ui, repo, **opts):
1360 def debuglocks(ui, repo, **opts):
1358 """show or modify state of locks
1361 """show or modify state of locks
1359
1362
1360 By default, this command will show which locks are held. This
1363 By default, this command will show which locks are held. This
1361 includes the user and process holding the lock, the amount of time
1364 includes the user and process holding the lock, the amount of time
1362 the lock has been held, and the machine name where the process is
1365 the lock has been held, and the machine name where the process is
1363 running if it's not local.
1366 running if it's not local.
1364
1367
1365 Locks protect the integrity of Mercurial's data, so they should be
1368 Locks protect the integrity of Mercurial's data, so they should be
1366 treated with care. System crashes or other interruptions may cause
1369 treated with care. System crashes or other interruptions may cause
1367 locks to not be properly released, though Mercurial will usually
1370 locks to not be properly released, though Mercurial will usually
1368 detect and remove such stale locks automatically.
1371 detect and remove such stale locks automatically.
1369
1372
1370 However, detecting stale locks may not always be possible (for
1373 However, detecting stale locks may not always be possible (for
1371 instance, on a shared filesystem). Removing locks may also be
1374 instance, on a shared filesystem). Removing locks may also be
1372 blocked by filesystem permissions.
1375 blocked by filesystem permissions.
1373
1376
1374 Setting a lock will prevent other commands from changing the data.
1377 Setting a lock will prevent other commands from changing the data.
1375 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1378 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1376 The set locks are removed when the command exits.
1379 The set locks are removed when the command exits.
1377
1380
1378 Returns 0 if no locks are held.
1381 Returns 0 if no locks are held.
1379
1382
1380 """
1383 """
1381
1384
1382 if opts.get(r'force_lock'):
1385 if opts.get(r'force_lock'):
1383 repo.svfs.unlink('lock')
1386 repo.svfs.unlink('lock')
1384 if opts.get(r'force_wlock'):
1387 if opts.get(r'force_wlock'):
1385 repo.vfs.unlink('wlock')
1388 repo.vfs.unlink('wlock')
1386 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1389 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1387 return 0
1390 return 0
1388
1391
1389 locks = []
1392 locks = []
1390 try:
1393 try:
1391 if opts.get(r'set_wlock'):
1394 if opts.get(r'set_wlock'):
1392 try:
1395 try:
1393 locks.append(repo.wlock(False))
1396 locks.append(repo.wlock(False))
1394 except error.LockHeld:
1397 except error.LockHeld:
1395 raise error.Abort(_('wlock is already held'))
1398 raise error.Abort(_('wlock is already held'))
1396 if opts.get(r'set_lock'):
1399 if opts.get(r'set_lock'):
1397 try:
1400 try:
1398 locks.append(repo.lock(False))
1401 locks.append(repo.lock(False))
1399 except error.LockHeld:
1402 except error.LockHeld:
1400 raise error.Abort(_('lock is already held'))
1403 raise error.Abort(_('lock is already held'))
1401 if len(locks):
1404 if len(locks):
1402 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1405 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1403 return 0
1406 return 0
1404 finally:
1407 finally:
1405 release(*locks)
1408 release(*locks)
1406
1409
1407 now = time.time()
1410 now = time.time()
1408 held = 0
1411 held = 0
1409
1412
1410 def report(vfs, name, method):
1413 def report(vfs, name, method):
1411 # this causes stale locks to get reaped for more accurate reporting
1414 # this causes stale locks to get reaped for more accurate reporting
1412 try:
1415 try:
1413 l = method(False)
1416 l = method(False)
1414 except error.LockHeld:
1417 except error.LockHeld:
1415 l = None
1418 l = None
1416
1419
1417 if l:
1420 if l:
1418 l.release()
1421 l.release()
1419 else:
1422 else:
1420 try:
1423 try:
1421 st = vfs.lstat(name)
1424 st = vfs.lstat(name)
1422 age = now - st[stat.ST_MTIME]
1425 age = now - st[stat.ST_MTIME]
1423 user = util.username(st.st_uid)
1426 user = util.username(st.st_uid)
1424 locker = vfs.readlock(name)
1427 locker = vfs.readlock(name)
1425 if ":" in locker:
1428 if ":" in locker:
1426 host, pid = locker.split(':')
1429 host, pid = locker.split(':')
1427 if host == socket.gethostname():
1430 if host == socket.gethostname():
1428 locker = 'user %s, process %s' % (user, pid)
1431 locker = 'user %s, process %s' % (user, pid)
1429 else:
1432 else:
1430 locker = 'user %s, process %s, host %s' \
1433 locker = 'user %s, process %s, host %s' \
1431 % (user, pid, host)
1434 % (user, pid, host)
1432 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1435 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1433 return 1
1436 return 1
1434 except OSError as e:
1437 except OSError as e:
1435 if e.errno != errno.ENOENT:
1438 if e.errno != errno.ENOENT:
1436 raise
1439 raise
1437
1440
1438 ui.write(("%-6s free\n") % (name + ":"))
1441 ui.write(("%-6s free\n") % (name + ":"))
1439 return 0
1442 return 0
1440
1443
1441 held += report(repo.svfs, "lock", repo.lock)
1444 held += report(repo.svfs, "lock", repo.lock)
1442 held += report(repo.vfs, "wlock", repo.wlock)
1445 held += report(repo.vfs, "wlock", repo.wlock)
1443
1446
1444 return held
1447 return held
1445
1448
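# A minimal illustrative sketch (not part of this module; the helper name is
# hypothetical): the lock files read above record their holder as "host:pid",
# and the report splits that apart and compares the host with the local
# machine name to decide how much detail to print.
def _describe_locker(locker, user):
    import socket
    if ':' not in locker:
        return locker
    host, pid = locker.split(':', 1)
    if host == socket.gethostname():
        return 'user %s, process %s' % (user, pid)
    return 'user %s, process %s, host %s' % (user, pid, host)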
1446 @command('debugmergestate', [], '')
1449 @command('debugmergestate', [], '')
1447 def debugmergestate(ui, repo, *args):
1450 def debugmergestate(ui, repo, *args):
1448 """print merge state
1451 """print merge state
1449
1452
1450 Use --verbose to print out information about whether v1 or v2 merge state
1453 Use --verbose to print out information about whether v1 or v2 merge state
1451 was chosen."""
1454 was chosen."""
1452 def _hashornull(h):
1455 def _hashornull(h):
1453 if h == nullhex:
1456 if h == nullhex:
1454 return 'null'
1457 return 'null'
1455 else:
1458 else:
1456 return h
1459 return h
1457
1460
1458 def printrecords(version):
1461 def printrecords(version):
1459 ui.write(('* version %d records\n') % version)
1462 ui.write(('* version %d records\n') % version)
1460 if version == 1:
1463 if version == 1:
1461 records = v1records
1464 records = v1records
1462 else:
1465 else:
1463 records = v2records
1466 records = v2records
1464
1467
1465 for rtype, record in records:
1468 for rtype, record in records:
1466 # pretty print some record types
1469 # pretty print some record types
1467 if rtype == 'L':
1470 if rtype == 'L':
1468 ui.write(('local: %s\n') % record)
1471 ui.write(('local: %s\n') % record)
1469 elif rtype == 'O':
1472 elif rtype == 'O':
1470 ui.write(('other: %s\n') % record)
1473 ui.write(('other: %s\n') % record)
1471 elif rtype == 'm':
1474 elif rtype == 'm':
1472 driver, mdstate = record.split('\0', 1)
1475 driver, mdstate = record.split('\0', 1)
1473 ui.write(('merge driver: %s (state "%s")\n')
1476 ui.write(('merge driver: %s (state "%s")\n')
1474 % (driver, mdstate))
1477 % (driver, mdstate))
1475 elif rtype in 'FDC':
1478 elif rtype in 'FDC':
1476 r = record.split('\0')
1479 r = record.split('\0')
1477 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1480 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1478 if version == 1:
1481 if version == 1:
1479 onode = 'not stored in v1 format'
1482 onode = 'not stored in v1 format'
1480 flags = r[7]
1483 flags = r[7]
1481 else:
1484 else:
1482 onode, flags = r[7:9]
1485 onode, flags = r[7:9]
1483 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1486 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1484 % (f, rtype, state, _hashornull(hash)))
1487 % (f, rtype, state, _hashornull(hash)))
1485 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1488 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1486 ui.write((' ancestor path: %s (node %s)\n')
1489 ui.write((' ancestor path: %s (node %s)\n')
1487 % (afile, _hashornull(anode)))
1490 % (afile, _hashornull(anode)))
1488 ui.write((' other path: %s (node %s)\n')
1491 ui.write((' other path: %s (node %s)\n')
1489 % (ofile, _hashornull(onode)))
1492 % (ofile, _hashornull(onode)))
1490 elif rtype == 'f':
1493 elif rtype == 'f':
1491 filename, rawextras = record.split('\0', 1)
1494 filename, rawextras = record.split('\0', 1)
1492 extras = rawextras.split('\0')
1495 extras = rawextras.split('\0')
1493 i = 0
1496 i = 0
1494 extrastrings = []
1497 extrastrings = []
1495 while i < len(extras):
1498 while i < len(extras):
1496 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1499 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1497 i += 2
1500 i += 2
1498
1501
1499 ui.write(('file extras: %s (%s)\n')
1502 ui.write(('file extras: %s (%s)\n')
1500 % (filename, ', '.join(extrastrings)))
1503 % (filename, ', '.join(extrastrings)))
1501 elif rtype == 'l':
1504 elif rtype == 'l':
1502 labels = record.split('\0', 2)
1505 labels = record.split('\0', 2)
1503 labels = [l for l in labels if len(l) > 0]
1506 labels = [l for l in labels if len(l) > 0]
1504 ui.write(('labels:\n'))
1507 ui.write(('labels:\n'))
1505 ui.write((' local: %s\n' % labels[0]))
1508 ui.write((' local: %s\n' % labels[0]))
1506 ui.write((' other: %s\n' % labels[1]))
1509 ui.write((' other: %s\n' % labels[1]))
1507 if len(labels) > 2:
1510 if len(labels) > 2:
1508 ui.write((' base: %s\n' % labels[2]))
1511 ui.write((' base: %s\n' % labels[2]))
1509 else:
1512 else:
1510 ui.write(('unrecognized entry: %s\t%s\n')
1513 ui.write(('unrecognized entry: %s\t%s\n')
1511 % (rtype, record.replace('\0', '\t')))
1514 % (rtype, record.replace('\0', '\t')))
1512
1515
1513 # Avoid mergestate.read() since it may raise an exception for unsupported
1516 # Avoid mergestate.read() since it may raise an exception for unsupported
1514 # merge state records. We shouldn't be doing this, but this is OK since this
1517 # merge state records. We shouldn't be doing this, but this is OK since this
1515 # command is pretty low-level.
1518 # command is pretty low-level.
1516 ms = mergemod.mergestate(repo)
1519 ms = mergemod.mergestate(repo)
1517
1520
1518 # sort so that reasonable information is on top
1521 # sort so that reasonable information is on top
1519 v1records = ms._readrecordsv1()
1522 v1records = ms._readrecordsv1()
1520 v2records = ms._readrecordsv2()
1523 v2records = ms._readrecordsv2()
1521 order = 'LOml'
1524 order = 'LOml'
1522 def key(r):
1525 def key(r):
1523 idx = order.find(r[0])
1526 idx = order.find(r[0])
1524 if idx == -1:
1527 if idx == -1:
1525 return (1, r[1])
1528 return (1, r[1])
1526 else:
1529 else:
1527 return (0, idx)
1530 return (0, idx)
1528 v1records.sort(key=key)
1531 v1records.sort(key=key)
1529 v2records.sort(key=key)
1532 v2records.sort(key=key)
1530
1533
1531 if not v1records and not v2records:
1534 if not v1records and not v2records:
1532 ui.write(('no merge state found\n'))
1535 ui.write(('no merge state found\n'))
1533 elif not v2records:
1536 elif not v2records:
1534 ui.note(('no version 2 merge state\n'))
1537 ui.note(('no version 2 merge state\n'))
1535 printrecords(1)
1538 printrecords(1)
1536 elif ms._v1v2match(v1records, v2records):
1539 elif ms._v1v2match(v1records, v2records):
1537 ui.note(('v1 and v2 states match: using v2\n'))
1540 ui.note(('v1 and v2 states match: using v2\n'))
1538 printrecords(2)
1541 printrecords(2)
1539 else:
1542 else:
1540 ui.note(('v1 and v2 states mismatch: using v1\n'))
1543 ui.note(('v1 and v2 states mismatch: using v1\n'))
1541 printrecords(1)
1544 printrecords(1)
1542 if ui.verbose:
1545 if ui.verbose:
1543 printrecords(2)
1546 printrecords(2)
1544
1547
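# A minimal illustrative sketch (not part of this module; the helper name and
# sample record are hypothetical): the 'f' (file extras) records decoded above
# are NUL-separated, a filename followed by alternating keys and values.
def _decode_file_extras(record):
    # 'foo.txt\0somekey\0somevalue' -> ('foo.txt', {'somekey': 'somevalue'})
    filename, rawextras = record.split('\0', 1)
    extras = rawextras.split('\0')
    return filename, dict(zip(extras[::2], extras[1::2]))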
1545 @command('debugnamecomplete', [], _('NAME...'))
1548 @command('debugnamecomplete', [], _('NAME...'))
1546 def debugnamecomplete(ui, repo, *args):
1549 def debugnamecomplete(ui, repo, *args):
1547 '''complete "names" - tags, open branch names, bookmark names'''
1550 '''complete "names" - tags, open branch names, bookmark names'''
1548
1551
1549 names = set()
1552 names = set()
1550 # since we previously only listed open branches, we will handle that
1553 # since we previously only listed open branches, we will handle that
1551 # specially (after this for loop)
1554 # specially (after this for loop)
1552 for name, ns in repo.names.iteritems():
1555 for name, ns in repo.names.iteritems():
1553 if name != 'branches':
1556 if name != 'branches':
1554 names.update(ns.listnames(repo))
1557 names.update(ns.listnames(repo))
1555 names.update(tag for (tag, heads, tip, closed)
1558 names.update(tag for (tag, heads, tip, closed)
1556 in repo.branchmap().iterbranches() if not closed)
1559 in repo.branchmap().iterbranches() if not closed)
1557 completions = set()
1560 completions = set()
1558 if not args:
1561 if not args:
1559 args = ['']
1562 args = ['']
1560 for a in args:
1563 for a in args:
1561 completions.update(n for n in names if n.startswith(a))
1564 completions.update(n for n in names if n.startswith(a))
1562 ui.write('\n'.join(sorted(completions)))
1565 ui.write('\n'.join(sorted(completions)))
1563 ui.write('\n')
1566 ui.write('\n')
1564
1567
1565 @command('debugobsolete',
1568 @command('debugobsolete',
1566 [('', 'flags', 0, _('markers flag')),
1569 [('', 'flags', 0, _('markers flag')),
1567 ('', 'record-parents', False,
1570 ('', 'record-parents', False,
1568 _('record parent information for the precursor')),
1571 _('record parent information for the precursor')),
1569 ('r', 'rev', [], _('display markers relevant to REV')),
1572 ('r', 'rev', [], _('display markers relevant to REV')),
1570 ('', 'exclusive', False, _('restrict display to markers only '
1573 ('', 'exclusive', False, _('restrict display to markers only '
1571 'relevant to REV')),
1574 'relevant to REV')),
1572 ('', 'index', False, _('display index of the marker')),
1575 ('', 'index', False, _('display index of the marker')),
1573 ('', 'delete', [], _('delete markers specified by indices')),
1576 ('', 'delete', [], _('delete markers specified by indices')),
1574 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1577 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1575 _('[OBSOLETED [REPLACEMENT ...]]'))
1578 _('[OBSOLETED [REPLACEMENT ...]]'))
1576 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1579 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1577 """create arbitrary obsolete marker
1580 """create arbitrary obsolete marker
1578
1581
1579 With no arguments, displays the list of obsolescence markers."""
1582 With no arguments, displays the list of obsolescence markers."""
1580
1583
1581 opts = pycompat.byteskwargs(opts)
1584 opts = pycompat.byteskwargs(opts)
1582
1585
1583 def parsenodeid(s):
1586 def parsenodeid(s):
1584 try:
1587 try:
1585 # We do not use revsingle/revrange functions here to accept
1588 # We do not use revsingle/revrange functions here to accept
1586 # arbitrary node identifiers, possibly not present in the
1589 # arbitrary node identifiers, possibly not present in the
1587 # local repository.
1590 # local repository.
1588 n = bin(s)
1591 n = bin(s)
1589 if len(n) != len(nullid):
1592 if len(n) != len(nullid):
1590 raise TypeError()
1593 raise TypeError()
1591 return n
1594 return n
1592 except TypeError:
1595 except TypeError:
1593 raise error.Abort('changeset references must be full hexadecimal '
1596 raise error.Abort('changeset references must be full hexadecimal '
1594 'node identifiers')
1597 'node identifiers')
1595
1598
1596 if opts.get('delete'):
1599 if opts.get('delete'):
1597 indices = []
1600 indices = []
1598 for v in opts.get('delete'):
1601 for v in opts.get('delete'):
1599 try:
1602 try:
1600 indices.append(int(v))
1603 indices.append(int(v))
1601 except ValueError:
1604 except ValueError:
1602 raise error.Abort(_('invalid index value: %r') % v,
1605 raise error.Abort(_('invalid index value: %r') % v,
1603 hint=_('use integers for indices'))
1606 hint=_('use integers for indices'))
1604
1607
1605 if repo.currenttransaction():
1608 if repo.currenttransaction():
1606 raise error.Abort(_('cannot delete obsmarkers in the middle '
1609 raise error.Abort(_('cannot delete obsmarkers in the middle '
1607 'of a transaction.'))
1610 'of a transaction.'))
1608
1611
1609 with repo.lock():
1612 with repo.lock():
1610 n = repair.deleteobsmarkers(repo.obsstore, indices)
1613 n = repair.deleteobsmarkers(repo.obsstore, indices)
1611 ui.write(_('deleted %i obsolescence markers\n') % n)
1614 ui.write(_('deleted %i obsolescence markers\n') % n)
1612
1615
1613 return
1616 return
1614
1617
1615 if precursor is not None:
1618 if precursor is not None:
1616 if opts['rev']:
1619 if opts['rev']:
1617 raise error.Abort('cannot select revision when creating marker')
1620 raise error.Abort('cannot select revision when creating marker')
1618 metadata = {}
1621 metadata = {}
1619 metadata['user'] = opts['user'] or ui.username()
1622 metadata['user'] = opts['user'] or ui.username()
1620 succs = tuple(parsenodeid(succ) for succ in successors)
1623 succs = tuple(parsenodeid(succ) for succ in successors)
1621 l = repo.lock()
1624 l = repo.lock()
1622 try:
1625 try:
1623 tr = repo.transaction('debugobsolete')
1626 tr = repo.transaction('debugobsolete')
1624 try:
1627 try:
1625 date = opts.get('date')
1628 date = opts.get('date')
1626 if date:
1629 if date:
1627 date = dateutil.parsedate(date)
1630 date = dateutil.parsedate(date)
1628 else:
1631 else:
1629 date = None
1632 date = None
1630 prec = parsenodeid(precursor)
1633 prec = parsenodeid(precursor)
1631 parents = None
1634 parents = None
1632 if opts['record_parents']:
1635 if opts['record_parents']:
1633 if prec not in repo.unfiltered():
1636 if prec not in repo.unfiltered():
1634 raise error.Abort('cannot use --record-parents on '
1637 raise error.Abort('cannot use --record-parents on '
1635 'unknown changesets')
1638 'unknown changesets')
1636 parents = repo.unfiltered()[prec].parents()
1639 parents = repo.unfiltered()[prec].parents()
1637 parents = tuple(p.node() for p in parents)
1640 parents = tuple(p.node() for p in parents)
1638 repo.obsstore.create(tr, prec, succs, opts['flags'],
1641 repo.obsstore.create(tr, prec, succs, opts['flags'],
1639 parents=parents, date=date,
1642 parents=parents, date=date,
1640 metadata=metadata, ui=ui)
1643 metadata=metadata, ui=ui)
1641 tr.close()
1644 tr.close()
1642 except ValueError as exc:
1645 except ValueError as exc:
1643 raise error.Abort(_('bad obsmarker input: %s') %
1646 raise error.Abort(_('bad obsmarker input: %s') %
1644 pycompat.bytestr(exc))
1647 pycompat.bytestr(exc))
1645 finally:
1648 finally:
1646 tr.release()
1649 tr.release()
1647 finally:
1650 finally:
1648 l.release()
1651 l.release()
1649 else:
1652 else:
1650 if opts['rev']:
1653 if opts['rev']:
1651 revs = scmutil.revrange(repo, opts['rev'])
1654 revs = scmutil.revrange(repo, opts['rev'])
1652 nodes = [repo[r].node() for r in revs]
1655 nodes = [repo[r].node() for r in revs]
1653 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1656 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1654 exclusive=opts['exclusive']))
1657 exclusive=opts['exclusive']))
1655 markers.sort(key=lambda x: x._data)
1658 markers.sort(key=lambda x: x._data)
1656 else:
1659 else:
1657 markers = obsutil.getmarkers(repo)
1660 markers = obsutil.getmarkers(repo)
1658
1661
1659 markerstoiter = markers
1662 markerstoiter = markers
1660 isrelevant = lambda m: True
1663 isrelevant = lambda m: True
1661 if opts.get('rev') and opts.get('index'):
1664 if opts.get('rev') and opts.get('index'):
1662 markerstoiter = obsutil.getmarkers(repo)
1665 markerstoiter = obsutil.getmarkers(repo)
1663 markerset = set(markers)
1666 markerset = set(markers)
1664 isrelevant = lambda m: m in markerset
1667 isrelevant = lambda m: m in markerset
1665
1668
1666 fm = ui.formatter('debugobsolete', opts)
1669 fm = ui.formatter('debugobsolete', opts)
1667 for i, m in enumerate(markerstoiter):
1670 for i, m in enumerate(markerstoiter):
1668 if not isrelevant(m):
1671 if not isrelevant(m):
1669 # marker can be irrelevant when we're iterating over a set
1672 # marker can be irrelevant when we're iterating over a set
1670 # of markers (markerstoiter) which is bigger than the set
1673 # of markers (markerstoiter) which is bigger than the set
1671 # of markers we want to display (markers)
1674 # of markers we want to display (markers)
1672 # this can happen if both --index and --rev options are
1675 # this can happen if both --index and --rev options are
1673 # provided and thus we need to iterate over all of the markers
1676 # provided and thus we need to iterate over all of the markers
1674 # to get the correct indices, but only display the ones that
1677 # to get the correct indices, but only display the ones that
1675 # are relevant to --rev value
1678 # are relevant to --rev value
1676 continue
1679 continue
1677 fm.startitem()
1680 fm.startitem()
1678 ind = i if opts.get('index') else None
1681 ind = i if opts.get('index') else None
1679 cmdutil.showmarker(fm, m, index=ind)
1682 cmdutil.showmarker(fm, m, index=ind)
1680 fm.end()
1683 fm.end()
1681
1684
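# A minimal illustrative sketch (not part of this module; the helper name is
# hypothetical and uses binascii instead of mercurial.node.bin): parsenodeid
# above only accepts full 40-character hexadecimal node identifiers, i.e.
# exactly 20 raw bytes once decoded.
def _parse_nodeid(s):
    import binascii
    try:
        n = binascii.unhexlify(s)
    except (TypeError, ValueError):
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    if len(n) != 20:
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    return n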
1682 @command('debugpathcomplete',
1685 @command('debugpathcomplete',
1683 [('f', 'full', None, _('complete an entire path')),
1686 [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

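# The completion above walks the dirstate and either keeps whole paths
# (--full) or cuts them at the next separator past the typed prefix. A
# minimal sketch of that segment-cutting step on a plain set of paths
# (the helper name and data here are hypothetical, not part of the command):
#
#     def nextsegments(tracked, prefix):
#         out = set()
#         for path in tracked:
#             if not path.startswith(prefix):
#                 continue
#             cut = path.find('/', len(prefix))
#             out.add(path if cut < 0 else path[:cut])
#         return out
#
#     nextsegments({'a/b/c', 'a/d', 'x'}, 'a/')  # -> {'a/b', 'a/d'}
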
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for the specified file

    As described in :hg:`help merge-tools`, Mercurial examines the
    configurations below in this order to decide which merge tool is
    chosen for the specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reasons only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out the examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actually updating to it.

    With --debug, this command also shows warning messages emitted while
    matching against ``merge-patterns`` and related configurations. It is
    recommended to use this option with explicit file patterns and/or
    -I/-X options, because it increases the amount of output per file
    according to the configurations in hgrc.

    With -v/--verbose, this command first shows the configurations below
    (only if they are specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If a merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such a case, the information above
    is useful for understanding why a merge tool was chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))

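# The tool selection above is a first-match-wins walk over the sources
# listed in the docstring (1..7). A minimal sketch of that precedence rule
# on plain data (names and values hypothetical; the real logic lives in
# filemerge._picktool):
#
#     def pickfirst(candidates):
#         for source, value in candidates:
#             if value:
#                 return source, value
#         return 'fallback', ':prompt'
#
#     pickfirst([('--tool', ''), ('HGMERGE', ''), ('ui.merge', 'vimdiff')])
#     # -> ('ui.merge', 'vimdiff')
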
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))

@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))

@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look for the given revision

    If no revision is specified, the current first parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)

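# A sketch of the --minimal selection above, on plain sets (the file names
# are hypothetical; dirstate state 'a' means "added"):
#
#     manifestfiles = {'a.txt', 'b.txt'}
#     dirstatefiles = {'b.txt', 'c.txt', 'new.txt'}
#     added = {'new.txt'}
#     manifestonly = manifestfiles - dirstatefiles          # {'a.txt'}
#     dsnotadded = (dirstatefiles - manifestfiles) - added  # {'c.txt'}
#     changedfiles = manifestonly | dsnotadded              # {'a.txt', 'c.txt'}
#
# Only those entries are rebuilt; adds, removes and modifications relative
# to the working copy parent are left alone.
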
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    repair.rebuildfncache(ui, repo)

@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)

@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

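# The running head count above only needs each revision's parents. A quick
# sketch on a tiny linear-then-branching DAG given as (rev, (p1, p2)) pairs
# (data hypothetical):
#
#     heads = set()
#     for rev, parents in [(0, (-1, -1)), (1, (0, -1)), (2, (0, -1))]:
#         heads -= set(parents)
#         heads.add(rev)
#     # after rev 1: {1}; after rev 2: {1, 2}  (two heads, both forked off 0)
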
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

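    # addsize() keeps a running [min, max, total] triple. A quick
    # illustration (values hypothetical):
    #
    #     sizes = [None, 0, 0]
    #     for s in (40, 10, 25):
    #         addsize(s, sizes)
    #     # sizes == [10, 40, 75]
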
    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

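    # The chainlengths bookkeeping above only needs each revision's delta
    # parent. A minimal sketch on a plain list (data hypothetical; -1 marks
    # a full snapshot, playing the role of nullrev above):
    #
    #     deltaparent = [-1, 0, 1, -1, 3]
    #     chainlen = []
    #     for rev, dp in enumerate(deltaparent):
    #         chainlen.append(0 if dp == -1 else chainlen[dp] + 1)
    #     # chainlen == [0, 1, 2, 0, 1]
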
    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

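    # The base format strings above expand in a second step so that every
    # column is as wide as its largest value. A quick illustration (values
    # hypothetical):
    #
    #     dfmt = '%%%dd\n' % len(str(12345))   # -> '%5d\n'
    #     dfmt % 42                            # -> '   42\n'
    #     pcfmt(3, 12)                         # -> (3, 25.0), shown as "3 (25.00%)"
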
    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))

@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
    ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use the -p/--show-stage option to print the parsed tree at the given
    stages. Use -p all to print the tree at every stage.

    Use the --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
        if opts['optimize']:
            showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)

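# The --verify-optimized branch above renders the two revision lists as a
# unified-style diff driven by SequenceMatcher opcodes. A minimal sketch of
# that pattern on plain ints (data hypothetical):
#
#     import difflib
#     a, b = [0, 1, 2, 4], [0, 2, 3, 4]
#     sm = difflib.SequenceMatcher(None, a, b)
#     for tag, alo, ahi, blo, bhi in sm.get_opcodes():
#         if tag in ('delete', 'replace'):
#             print('\n'.join('-%d' % c for c in a[alo:ahi]))
#         if tag in ('insert', 'replace'):
#             print('\n'.join('+%d' % c for c in b[blo:bhi]))
#         if tag == 'equal':
#             print('\n'.join(' %d' % c for c in a[alo:ahi]))
#     # prints " 0", "-1", " 2", "+3", " 4"
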
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)

@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()

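# Port selection above: an explicit port in the URL wins, otherwise the
# scheme's default is used. A quick illustration (values hypothetical):
#
#     defaultport = {'https': 443, 'ssh': 22}
#     int(None or defaultport['https'])    # -> 443
#     int('8443' or defaultport['https'])  # -> 8443
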
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only, unless the closest
    successors sets option (--closest) is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()

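# Sketch of the -D KEY=VALUE parsing above (input hypothetical):
#
#     d = 'author=alice'
#     k, v = (e.strip() for e in d.split('=', 1))
#     # k == 'author', v == 'alice'
#
# A definition without '=' yields a single element, so the unpacking raises
# ValueError, which the loop above converts into an Abort.
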
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)

@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)

@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)

2549 @command('debugupgraderepo', [
2552 @command('debugupgraderepo', [
2550 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2553 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2551 ('', 'run', False, _('performs an upgrade')),
2554 ('', 'run', False, _('performs an upgrade')),
2552 ])
2555 ])
2553 def debugupgraderepo(ui, repo, run=False, optimize=None):
2556 def debugupgraderepo(ui, repo, run=False, optimize=None):
2554 """upgrade a repository to use different features
2557 """upgrade a repository to use different features
2555
2558
2556 If no arguments are specified, the repository is evaluated for upgrade
2559 If no arguments are specified, the repository is evaluated for upgrade
2557 and a list of problems and potential optimizations is printed.
2560 and a list of problems and potential optimizations is printed.
2558
2561
2559 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2562 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2560 can be influenced via additional arguments. More details will be provided
2563 can be influenced via additional arguments. More details will be provided
2561 by the command output when run without ``--run``.
2564 by the command output when run without ``--run``.
2562
2565
2563 During the upgrade, the repository will be locked and no writes will be
2566 During the upgrade, the repository will be locked and no writes will be
2564 allowed.
2567 allowed.
2565
2568
2566 At the end of the upgrade, the repository may not be readable while new
2569 At the end of the upgrade, the repository may not be readable while new
2567 repository data is swapped in. This window will be as long as it takes to
2570 repository data is swapped in. This window will be as long as it takes to
2568 rename some directories inside the ``.hg`` directory. On most machines, this
2571 rename some directories inside the ``.hg`` directory. On most machines, this
2569 should complete almost instantaneously and the chances of a consumer being
2572 should complete almost instantaneously and the chances of a consumer being
2570 unable to access the repository should be low.
2573 unable to access the repository should be low.
2571 """
2574 """
2572 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2575 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2573
2576
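# A hypothetical usage sketch for the command above (the optimization name is
# an assumption for illustration, not something this changeset asserts):
#
#   $ hg debugupgraderepo                                # report findings only
#   $ hg debugupgraderepo --optimize redeltaall --run    # perform the upgrade
#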
2574 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2577 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2575 inferrepo=True)
2578 inferrepo=True)
2576 def debugwalk(ui, repo, *pats, **opts):
2579 def debugwalk(ui, repo, *pats, **opts):
2577 """show how files match on given patterns"""
2580 """show how files match on given patterns"""
2578 opts = pycompat.byteskwargs(opts)
2581 opts = pycompat.byteskwargs(opts)
2579 m = scmutil.match(repo[None], pats, opts)
2582 m = scmutil.match(repo[None], pats, opts)
2580 if ui.verbose:
2583 if ui.verbose:
2581 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2584 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2582 items = list(repo[None].walk(m))
2585 items = list(repo[None].walk(m))
2583 if not items:
2586 if not items:
2584 return
2587 return
2585 f = lambda fn: fn
2588 f = lambda fn: fn
2586 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2589 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2587 f = lambda fn: util.normpath(fn)
2590 f = lambda fn: util.normpath(fn)
2588 fmt = 'f %%-%ds %%-%ds %%s' % (
2591 fmt = 'f %%-%ds %%-%ds %%s' % (
2589 max([len(abs) for abs in items]),
2592 max([len(abs) for abs in items]),
2590 max([len(m.rel(abs)) for abs in items]))
2593 max([len(m.rel(abs)) for abs in items]))
2591 for abs in items:
2594 for abs in items:
2592 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2595 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2593 ui.write("%s\n" % line.rstrip())
2596 ui.write("%s\n" % line.rstrip())
2594
2597
2595 @command('debugwhyunstable', [], _('REV'))
2598 @command('debugwhyunstable', [], _('REV'))
2596 def debugwhyunstable(ui, repo, rev):
2599 def debugwhyunstable(ui, repo, rev):
2597 """explain instabilities of a changeset"""
2600 """explain instabilities of a changeset"""
2598 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2601 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2599 dnodes = ''
2602 dnodes = ''
2600 if entry.get('divergentnodes'):
2603 if entry.get('divergentnodes'):
2601 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2604 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2602 for ctx in entry['divergentnodes']) + ' '
2605 for ctx in entry['divergentnodes']) + ' '
2603 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2606 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2604 entry['reason'], entry['node']))
2607 entry['reason'], entry['node']))
2605
2608
2606 @command('debugwireargs',
2609 @command('debugwireargs',
2607 [('', 'three', '', 'three'),
2610 [('', 'three', '', 'three'),
2608 ('', 'four', '', 'four'),
2611 ('', 'four', '', 'four'),
2609 ('', 'five', '', 'five'),
2612 ('', 'five', '', 'five'),
2610 ] + cmdutil.remoteopts,
2613 ] + cmdutil.remoteopts,
2611 _('REPO [OPTIONS]... [ONE [TWO]]'),
2614 _('REPO [OPTIONS]... [ONE [TWO]]'),
2612 norepo=True)
2615 norepo=True)
2613 def debugwireargs(ui, repopath, *vals, **opts):
2616 def debugwireargs(ui, repopath, *vals, **opts):
2614 opts = pycompat.byteskwargs(opts)
2617 opts = pycompat.byteskwargs(opts)
2615 repo = hg.peer(ui, opts, repopath)
2618 repo = hg.peer(ui, opts, repopath)
2616 for opt in cmdutil.remoteopts:
2619 for opt in cmdutil.remoteopts:
2617 del opts[opt[1]]
2620 del opts[opt[1]]
2618 args = {}
2621 args = {}
2619 for k, v in opts.iteritems():
2622 for k, v in opts.iteritems():
2620 if v:
2623 if v:
2621 args[k] = v
2624 args[k] = v
2622 args = pycompat.strkwargs(args)
2625 args = pycompat.strkwargs(args)
2623 # run twice to check that we don't mess up the stream for the next command
2626 # run twice to check that we don't mess up the stream for the next command
2624 res1 = repo.debugwireargs(*vals, **args)
2627 res1 = repo.debugwireargs(*vals, **args)
2625 res2 = repo.debugwireargs(*vals, **args)
2628 res2 = repo.debugwireargs(*vals, **args)
2626 ui.write("%s\n" % res1)
2629 ui.write("%s\n" % res1)
2627 if res1 != res2:
2630 if res1 != res2:
2628 ui.warn("%s\n" % res2)
2631 ui.warn("%s\n" % res2)
2629
2632
2630 def _parsewirelangblocks(fh):
2633 def _parsewirelangblocks(fh):
2631 activeaction = None
2634 activeaction = None
2632 blocklines = []
2635 blocklines = []
2633
2636
2634 for line in fh:
2637 for line in fh:
2635 line = line.rstrip()
2638 line = line.rstrip()
2636 if not line:
2639 if not line:
2637 continue
2640 continue
2638
2641
2639 if line.startswith(b'#'):
2642 if line.startswith(b'#'):
2640 continue
2643 continue
2641
2644
2642 if not line.startswith(' '):
2645 if not line.startswith(' '):
2643 # New block. Flush previous one.
2646 # New block. Flush previous one.
2644 if activeaction:
2647 if activeaction:
2645 yield activeaction, blocklines
2648 yield activeaction, blocklines
2646
2649
2647 activeaction = line
2650 activeaction = line
2648 blocklines = []
2651 blocklines = []
2649 continue
2652 continue
2650
2653
2651 # Else we start with an indent.
2654 # Else we start with an indent.
2652
2655
2653 if not activeaction:
2656 if not activeaction:
2654 raise error.Abort(_('indented line outside of block'))
2657 raise error.Abort(_('indented line outside of block'))
2655
2658
2656 blocklines.append(line)
2659 blocklines.append(line)
2657
2660
2658 # Flush last block.
2661 # Flush last block.
2659 if activeaction:
2662 if activeaction:
2660 yield activeaction, blocklines
2663 yield activeaction, blocklines
2661
2664
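# A minimal sketch of what _parsewirelangblocks() yields. Given this assumed
# input (not taken from a real test):
#
#   command listkeys
#       namespace bookmarks
#   flush
#
# the generator produces ('command listkeys', ['    namespace bookmarks'])
# and then ('flush', []).
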
2662 @command('debugwireproto',
2665 @command('debugwireproto',
2663 [
2666 [
2664 ('', 'localssh', False, _('start an SSH server for this repo')),
2667 ('', 'localssh', False, _('start an SSH server for this repo')),
2665 ('', 'peer', '', _('construct a specific version of the peer')),
2668 ('', 'peer', '', _('construct a specific version of the peer')),
2666 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2669 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2667 ('', 'nologhandshake', False,
2670 ('', 'nologhandshake', False,
2668 _('do not log I/O related to the peer handshake')),
2671 _('do not log I/O related to the peer handshake')),
2669 ] + cmdutil.remoteopts,
2672 ] + cmdutil.remoteopts,
2670 _('[PATH]'),
2673 _('[PATH]'),
2671 optionalrepo=True)
2674 optionalrepo=True)
2672 def debugwireproto(ui, repo, path=None, **opts):
2675 def debugwireproto(ui, repo, path=None, **opts):
2673 """send wire protocol commands to a server
2676 """send wire protocol commands to a server
2674
2677
2675 This command can be used to issue wire protocol commands to remote
2678 This command can be used to issue wire protocol commands to remote
2676 peers and to debug the raw data being exchanged.
2679 peers and to debug the raw data being exchanged.
2677
2680
2678 ``--localssh`` will start an SSH server against the current repository
2681 ``--localssh`` will start an SSH server against the current repository
2679 and connect to that. By default, the connection will perform a handshake
2682 and connect to that. By default, the connection will perform a handshake
2680 and establish an appropriate peer instance.
2683 and establish an appropriate peer instance.
2681
2684
2682 ``--peer`` can be used to bypass the handshake protocol and construct a
2685 ``--peer`` can be used to bypass the handshake protocol and construct a
2683 peer instance using the specified class type. Valid values are ``raw``,
2686 peer instance using the specified class type. Valid values are ``raw``,
2684 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2687 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2685 raw data payloads and don't support higher-level command actions.
2688 raw data payloads and don't support higher-level command actions.
2686
2689
2687 ``--noreadstderr`` can be used to disable automatic reading from stderr
2690 ``--noreadstderr`` can be used to disable automatic reading from stderr
2688 of the peer (for SSH connections only). Disabling automatic reading of
2691 of the peer (for SSH connections only). Disabling automatic reading of
2689 stderr is useful for making output more deterministic.
2692 stderr is useful for making output more deterministic.
2690
2693
2691 Commands are issued via a mini language which is specified via stdin.
2694 Commands are issued via a mini language which is specified via stdin.
2692 The language consists of individual actions to perform. An action is
2695 The language consists of individual actions to perform. An action is
2693 defined by a block. A block is defined as a line with no leading
2696 defined by a block. A block is defined as a line with no leading
2694 space followed by 0 or more lines with leading space. Blocks are
2697 space followed by 0 or more lines with leading space. Blocks are
2695 effectively a high-level command with additional metadata.
2698 effectively a high-level command with additional metadata.
2696
2699
2697 Lines beginning with ``#`` are ignored.
2700 Lines beginning with ``#`` are ignored.
2698
2701
2699 The following sections denote available actions.
2702 The following sections denote available actions.
2700
2703
2701 raw
2704 raw
2702 ---
2705 ---
2703
2706
2704 Send raw data to the server.
2707 Send raw data to the server.
2705
2708
2706 The block payload contains the raw data to send as one atomic send
2709 The block payload contains the raw data to send as one atomic send
2707 operation. The data may not actually be delivered in a single system
2710 operation. The data may not actually be delivered in a single system
2708 call: it depends on the abilities of the transport being used.
2711 call: it depends on the abilities of the transport being used.
2709
2712
2710 Each line in the block is de-indented and concatenated. Then, that
2713 Each line in the block is de-indented and concatenated. Then, that
2711 value is evaluated as a Python b'' literal. This allows the use of
2714 value is evaluated as a Python b'' literal. This allows the use of
2712 backslash escaping, etc.
2715 backslash escaping, etc.
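
For example, a ``raw`` block whose indented payload line is evaluated as a
b'' literal (an illustrative value, not from a recorded session)::

  raw
      hello\n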
2713
2716
2714 raw+
2717 raw+
2715 ----
2718 ----
2716
2719
2717 Behaves like ``raw`` except flushes output afterwards.
2720 Behaves like ``raw`` except flushes output afterwards.
2718
2721
2719 command <X>
2722 command <X>
2720 -----------
2723 -----------
2721
2724
2722 Send a request to run a named command, whose name follows the ``command``
2725 Send a request to run a named command, whose name follows the ``command``
2723 string.
2726 string.
2724
2727
2725 Arguments to the command are defined as lines in this block. The format of
2728 Arguments to the command are defined as lines in this block. The format of
2726 each line is ``<key> <value>``. e.g.::
2729 each line is ``<key> <value>``. e.g.::
2727
2730
2728 command listkeys
2731 command listkeys
2729 namespace bookmarks
2732 namespace bookmarks
2730
2733
2731 If the value begins with ``eval:``, it will be interpreted as a Python
2734 If the value begins with ``eval:``, it will be interpreted as a Python
2732 literal expression. Otherwise values are interpreted as Python b'' literals.
2735 literal expression. Otherwise values are interpreted as Python b'' literals.
2733 This allows sending complex types and encoding special byte sequences via
2736 This allows sending complex types and encoding special byte sequences via
2734 backslash escaping.
2737 backslash escaping.
2735
2738
2736 The following arguments have special meaning:
2739 The following arguments have special meaning:
2737
2740
2738 ``PUSHFILE``
2741 ``PUSHFILE``
2739 When defined, the *push* mechanism of the peer will be used instead
2742 When defined, the *push* mechanism of the peer will be used instead
2740 of the static request-response mechanism and the content of the
2743 of the static request-response mechanism and the content of the
2741 file specified in the value of this argument will be sent as the
2744 file specified in the value of this argument will be sent as the
2742 command payload.
2745 command payload.
2743
2746
2744 This can be used to submit a local bundle file to the remote.
2747 This can be used to submit a local bundle file to the remote.
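
For example, to feed a local bundle file as the payload of an ``unbundle``
request (the file name is illustrative and other arguments are omitted)::

  command unbundle
      PUSHFILE ../initial.v1.hg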
2745
2748
2746 batchbegin
2749 batchbegin
2747 ----------
2750 ----------
2748
2751
2749 Instruct the peer to begin a batched send.
2752 Instruct the peer to begin a batched send.
2750
2753
2751 All ``command`` blocks are queued for execution until the next
2754 All ``command`` blocks are queued for execution until the next
2752 ``batchsubmit`` block.
2755 ``batchsubmit`` block.
2753
2756
2754 batchsubmit
2757 batchsubmit
2755 -----------
2758 -----------
2756
2759
2757 Submit previously queued ``command`` blocks as a batch request.
2760 Submit previously queued ``command`` blocks as a batch request.
2758
2761
2759 This action MUST be paired with a ``batchbegin`` action.
2762 This action MUST be paired with a ``batchbegin`` action.
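
For example (the batched commands are illustrative)::

  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit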
2760
2763
2761 httprequest <method> <path>
2764 httprequest <method> <path>
2762 ---------------------------
2765 ---------------------------
2763
2766
2764 (HTTP peer only)
2767 (HTTP peer only)
2765
2768
2766 Send an HTTP request to the peer.
2769 Send an HTTP request to the peer.
2767
2770
2768 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2771 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2769
2772
2770 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2773 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2771 headers to add to the request. e.g. ``Accept: foo``.
2774 headers to add to the request. e.g. ``Accept: foo``.
2772
2775
2773 The following arguments are special:
2776 The following arguments are special:
2774
2777
2775 ``BODYFILE``
2778 ``BODYFILE``
2776 The content of the file defined as the value to this argument will be
2779 The content of the file defined as the value to this argument will be
2777 transferred verbatim as the HTTP request body.
2780 transferred verbatim as the HTTP request body.
2778
2781
2779 ``frame <type> <flags> <payload>``
2782 ``frame <type> <flags> <payload>``
2780 Send a unified protocol frame as part of the request body.
2783 Send a unified protocol frame as part of the request body.
2781
2784
2782 All frames will be collected and sent as the body to the HTTP
2785 All frames will be collected and sent as the body to the HTTP
2783 request.
2786 request.
2784
2787
2785 close
2788 close
2786 -----
2789 -----
2787
2790
2788 Close the connection to the server.
2791 Close the connection to the server.
2789
2792
2790 flush
2793 flush
2791 -----
2794 -----
2792
2795
2793 Flush data written to the server.
2796 Flush data written to the server.
2794
2797
2795 readavailable
2798 readavailable
2796 -------------
2799 -------------
2797
2800
2798 Close the write end of the connection and read all available data from
2801 Close the write end of the connection and read all available data from
2799 the server.
2802 the server.
2800
2803
2801 If the connection to the server encompasses multiple pipes, we poll both
2804 If the connection to the server encompasses multiple pipes, we poll both
2802 pipes and read available data.
2805 pipes and read available data.
2803
2806
2804 readline
2807 readline
2805 --------
2808 --------
2806
2809
2807 Read a line of output from the server. If there are multiple output
2810 Read a line of output from the server. If there are multiple output
2808 pipes, reads only the main pipe.
2811 pipes, reads only the main pipe.
2809
2812
2810 ereadline
2813 ereadline
2811 ---------
2814 ---------
2812
2815
2813 Like ``readline``, but read from the stderr pipe, if available.
2816 Like ``readline``, but read from the stderr pipe, if available.
2814
2817
2815 read <X>
2818 read <X>
2816 --------
2819 --------
2817
2820
2818 ``read()`` N bytes from the server's main output pipe.
2821 ``read()`` N bytes from the server's main output pipe.
2819
2822
2820 eread <X>
2823 eread <X>
2821 ---------
2824 ---------
2822
2825
2823 ``read()`` N bytes from the server's stderr pipe, if available.
2826 ``read()`` N bytes from the server's stderr pipe, if available.
2824
2827
2825 Specifying Unified Frame-Based Protocol Frames
2828 Specifying Unified Frame-Based Protocol Frames
2826 ----------------------------------------------
2829 ----------------------------------------------
2827
2830
2828 It is possible to emit a *Unified Frame-Based Protocol* by using special
2831 It is possible to emit a *Unified Frame-Based Protocol* by using special
2829 syntax.
2832 syntax.
2830
2833
2831 A frame is composed as a type, flags, and payload. These can be parsed
2834 A frame is composed as a type, flags, and payload. These can be parsed
2832 from a string of the form:
2835 from a string of the form:
2833
2836
2834 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2837 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2835
2838
2836 ``request-id`` and ``stream-id`` are integers defining the request and
2839 ``request-id`` and ``stream-id`` are integers defining the request and
2837 stream identifiers.
2840 stream identifiers.
2838
2841
2839 ``type`` can be an integer value for the frame type or the string name
2842 ``type`` can be an integer value for the frame type or the string name
2840 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2843 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2841 ``command-name``.
2844 ``command-name``.
2842
2845
2843 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2846 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2844 components. Each component (and there can be just one) can be an integer
2847 components. Each component (and there can be just one) can be an integer
2845 or a flag name for stream flags or frame flags, respectively. Values are
2848 or a flag name for stream flags or frame flags, respectively. Values are
2846 resolved to integers and then bitwise OR'd together.
2849 resolved to integers and then bitwise OR'd together.
2847
2850
2848 ``payload`` represents the raw frame payload. If it begins with
2851 ``payload`` represents the raw frame payload. If it begins with
2849 ``cbor:``, the following string is evaluated as Python code and the
2852 ``cbor:``, the following string is evaluated as Python code and the
2850 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2853 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2851 as a Python byte string literal.
2854 as a Python byte string literal.
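
For example, a frame that could request the ``heads`` command (all values
here are illustrative)::

  1 1 stream-begin command-request new cbor:{b'name': b'heads'}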
2852 """
2855 """
2853 opts = pycompat.byteskwargs(opts)
2856 opts = pycompat.byteskwargs(opts)
2854
2857
2855 if opts['localssh'] and not repo:
2858 if opts['localssh'] and not repo:
2856 raise error.Abort(_('--localssh requires a repository'))
2859 raise error.Abort(_('--localssh requires a repository'))
2857
2860
2858 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2861 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2859 raise error.Abort(_('invalid value for --peer'),
2862 raise error.Abort(_('invalid value for --peer'),
2860 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2863 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
2861
2864
2862 if path and opts['localssh']:
2865 if path and opts['localssh']:
2863 raise error.Abort(_('cannot specify --localssh with an explicit '
2866 raise error.Abort(_('cannot specify --localssh with an explicit '
2864 'path'))
2867 'path'))
2865
2868
2866 if ui.interactive():
2869 if ui.interactive():
2867 ui.write(_('(waiting for commands on stdin)\n'))
2870 ui.write(_('(waiting for commands on stdin)\n'))
2868
2871
2869 blocks = list(_parsewirelangblocks(ui.fin))
2872 blocks = list(_parsewirelangblocks(ui.fin))
2870
2873
2871 proc = None
2874 proc = None
2872 stdin = None
2875 stdin = None
2873 stdout = None
2876 stdout = None
2874 stderr = None
2877 stderr = None
2875 opener = None
2878 opener = None
2876
2879
2877 if opts['localssh']:
2880 if opts['localssh']:
2878 # We start the SSH server in its own process so there is process
2881 # We start the SSH server in its own process so there is process
2879 # separation. This prevents a whole class of potential bugs around
2882 # separation. This prevents a whole class of potential bugs around
2880 # shared state from interfering with server operation.
2883 # shared state from interfering with server operation.
2881 args = procutil.hgcmd() + [
2884 args = procutil.hgcmd() + [
2882 '-R', repo.root,
2885 '-R', repo.root,
2883 'debugserve', '--sshstdio',
2886 'debugserve', '--sshstdio',
2884 ]
2887 ]
2885 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2888 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2886 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2889 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2887 bufsize=0)
2890 bufsize=0)
2888
2891
2889 stdin = proc.stdin
2892 stdin = proc.stdin
2890 stdout = proc.stdout
2893 stdout = proc.stdout
2891 stderr = proc.stderr
2894 stderr = proc.stderr
2892
2895
2893 # We turn the pipes into observers so we can log I/O.
2896 # We turn the pipes into observers so we can log I/O.
2894 if ui.verbose or opts['peer'] == 'raw':
2897 if ui.verbose or opts['peer'] == 'raw':
2895 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2898 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2896 logdata=True)
2899 logdata=True)
2897 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2900 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2898 logdata=True)
2901 logdata=True)
2899 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2902 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2900 logdata=True)
2903 logdata=True)
2901
2904
2902 # --localssh also implies the peer connection settings.
2905 # --localssh also implies the peer connection settings.
2903
2906
2904 url = 'ssh://localserver'
2907 url = 'ssh://localserver'
2905 autoreadstderr = not opts['noreadstderr']
2908 autoreadstderr = not opts['noreadstderr']
2906
2909
2907 if opts['peer'] == 'ssh1':
2910 if opts['peer'] == 'ssh1':
2908 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2911 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2909 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2912 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2910 None, autoreadstderr=autoreadstderr)
2913 None, autoreadstderr=autoreadstderr)
2911 elif opts['peer'] == 'ssh2':
2914 elif opts['peer'] == 'ssh2':
2912 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2915 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2913 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2916 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2914 None, autoreadstderr=autoreadstderr)
2917 None, autoreadstderr=autoreadstderr)
2915 elif opts['peer'] == 'raw':
2918 elif opts['peer'] == 'raw':
2916 ui.write(_('using raw connection to peer\n'))
2919 ui.write(_('using raw connection to peer\n'))
2917 peer = None
2920 peer = None
2918 else:
2921 else:
2919 ui.write(_('creating ssh peer from handshake results\n'))
2922 ui.write(_('creating ssh peer from handshake results\n'))
2920 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2923 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2921 autoreadstderr=autoreadstderr)
2924 autoreadstderr=autoreadstderr)
2922
2925
2923 elif path:
2926 elif path:
2924 # We bypass hg.peer() so we can proxy the sockets.
2927 # We bypass hg.peer() so we can proxy the sockets.
2925 # TODO consider not doing this because we skip
2928 # TODO consider not doing this because we skip
2926 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2929 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2927 u = util.url(path)
2930 u = util.url(path)
2928 if u.scheme != 'http':
2931 if u.scheme != 'http':
2929 raise error.Abort(_('only http:// paths are currently supported'))
2932 raise error.Abort(_('only http:// paths are currently supported'))
2930
2933
2931 url, authinfo = u.authinfo()
2934 url, authinfo = u.authinfo()
2932 openerargs = {
2935 openerargs = {
2933 r'useragent': b'Mercurial debugwireproto',
2936 r'useragent': b'Mercurial debugwireproto',
2934 }
2937 }
2935
2938
2936 # Turn pipes/sockets into observers so we can log I/O.
2939 # Turn pipes/sockets into observers so we can log I/O.
2937 if ui.verbose:
2940 if ui.verbose:
2938 openerargs.update({
2941 openerargs.update({
2939 r'loggingfh': ui,
2942 r'loggingfh': ui,
2940 r'loggingname': b's',
2943 r'loggingname': b's',
2941 r'loggingopts': {
2944 r'loggingopts': {
2942 r'logdata': True,
2945 r'logdata': True,
2943 r'logdataapis': False,
2946 r'logdataapis': False,
2944 },
2947 },
2945 })
2948 })
2946
2949
2947 if ui.debugflag:
2950 if ui.debugflag:
2948 openerargs[r'loggingopts'][r'logdataapis'] = True
2951 openerargs[r'loggingopts'][r'logdataapis'] = True
2949
2952
2950 # Don't send default headers when in raw mode. This allows us to
2953 # Don't send default headers when in raw mode. This allows us to
2951 # bypass most of the behavior of our URL handling code so we can
2954 # bypass most of the behavior of our URL handling code so we can
2952 # have near complete control over what's sent on the wire.
2955 # have near complete control over what's sent on the wire.
2953 if opts['peer'] == 'raw':
2956 if opts['peer'] == 'raw':
2954 openerargs[r'sendaccept'] = False
2957 openerargs[r'sendaccept'] = False
2955
2958
2956 opener = urlmod.opener(ui, authinfo, **openerargs)
2959 opener = urlmod.opener(ui, authinfo, **openerargs)
2957
2960
2958 if opts['peer'] == 'http2':
2961 if opts['peer'] == 'http2':
2959 ui.write(_('creating http peer for wire protocol version 2\n'))
2962 ui.write(_('creating http peer for wire protocol version 2\n'))
2960 # We go through makepeer() because we need an API descriptor for
2963 # We go through makepeer() because we need an API descriptor for
2961 # the peer instance to be useful.
2964 # the peer instance to be useful.
2962 with ui.configoverride({
2965 with ui.configoverride({
2963 ('experimental', 'httppeer.advertise-v2'): True}):
2966 ('experimental', 'httppeer.advertise-v2'): True}):
2964 if opts['nologhandshake']:
2967 if opts['nologhandshake']:
2965 ui.pushbuffer()
2968 ui.pushbuffer()
2966
2969
2967 peer = httppeer.makepeer(ui, path, opener=opener)
2970 peer = httppeer.makepeer(ui, path, opener=opener)
2968
2971
2969 if opts['nologhandshake']:
2972 if opts['nologhandshake']:
2970 ui.popbuffer()
2973 ui.popbuffer()
2971
2974
2972 if not isinstance(peer, httppeer.httpv2peer):
2975 if not isinstance(peer, httppeer.httpv2peer):
2973 raise error.Abort(_('could not instantiate HTTP peer for '
2976 raise error.Abort(_('could not instantiate HTTP peer for '
2974 'wire protocol version 2'),
2977 'wire protocol version 2'),
2975 hint=_('the server may not have the feature '
2978 hint=_('the server may not have the feature '
2976 'enabled or is not allowing this '
2979 'enabled or is not allowing this '
2977 'client version'))
2980 'client version'))
2978
2981
2979 elif opts['peer'] == 'raw':
2982 elif opts['peer'] == 'raw':
2980 ui.write(_('using raw connection to peer\n'))
2983 ui.write(_('using raw connection to peer\n'))
2981 peer = None
2984 peer = None
2982 elif opts['peer']:
2985 elif opts['peer']:
2983 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2986 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2984 opts['peer'])
2987 opts['peer'])
2985 else:
2988 else:
2986 peer = httppeer.makepeer(ui, path, opener=opener)
2989 peer = httppeer.makepeer(ui, path, opener=opener)
2987
2990
2988 # We /could/ populate stdin/stdout with sock.makefile()...
2991 # We /could/ populate stdin/stdout with sock.makefile()...
2989 else:
2992 else:
2990 raise error.Abort(_('unsupported connection configuration'))
2993 raise error.Abort(_('unsupported connection configuration'))
2991
2994
2992 batchedcommands = None
2995 batchedcommands = None
2993
2996
2994 # Now perform actions based on the parsed wire language instructions.
2997 # Now perform actions based on the parsed wire language instructions.
2995 for action, lines in blocks:
2998 for action, lines in blocks:
2996 if action in ('raw', 'raw+'):
2999 if action in ('raw', 'raw+'):
2997 if not stdin:
3000 if not stdin:
2998 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3001 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2999
3002
3000 # Concatenate the data together.
3003 # Concatenate the data together.
3001 data = ''.join(l.lstrip() for l in lines)
3004 data = ''.join(l.lstrip() for l in lines)
3002 data = stringutil.unescapestr(data)
3005 data = stringutil.unescapestr(data)
3003 stdin.write(data)
3006 stdin.write(data)
3004
3007
3005 if action == 'raw+':
3008 if action == 'raw+':
3006 stdin.flush()
3009 stdin.flush()
3007 elif action == 'flush':
3010 elif action == 'flush':
3008 if not stdin:
3011 if not stdin:
3009 raise error.Abort(_('cannot call flush on this peer'))
3012 raise error.Abort(_('cannot call flush on this peer'))
3010 stdin.flush()
3013 stdin.flush()
3011 elif action.startswith('command'):
3014 elif action.startswith('command'):
3012 if not peer:
3015 if not peer:
3013 raise error.Abort(_('cannot send commands unless peer instance '
3016 raise error.Abort(_('cannot send commands unless peer instance '
3014 'is available'))
3017 'is available'))
3015
3018
3016 command = action.split(' ', 1)[1]
3019 command = action.split(' ', 1)[1]
3017
3020
3018 args = {}
3021 args = {}
3019 for line in lines:
3022 for line in lines:
3020 # We need to allow empty values.
3023 # We need to allow empty values.
3021 fields = line.lstrip().split(' ', 1)
3024 fields = line.lstrip().split(' ', 1)
3022 if len(fields) == 1:
3025 if len(fields) == 1:
3023 key = fields[0]
3026 key = fields[0]
3024 value = ''
3027 value = ''
3025 else:
3028 else:
3026 key, value = fields
3029 key, value = fields
3027
3030
3028 if value.startswith('eval:'):
3031 if value.startswith('eval:'):
3029 value = stringutil.evalpythonliteral(value[5:])
3032 value = stringutil.evalpythonliteral(value[5:])
3030 else:
3033 else:
3031 value = stringutil.unescapestr(value)
3034 value = stringutil.unescapestr(value)
3032
3035
3033 args[key] = value
3036 args[key] = value
3034
3037
3035 if batchedcommands is not None:
3038 if batchedcommands is not None:
3036 batchedcommands.append((command, args))
3039 batchedcommands.append((command, args))
3037 continue
3040 continue
3038
3041
3039 ui.status(_('sending %s command\n') % command)
3042 ui.status(_('sending %s command\n') % command)
3040
3043
3041 if 'PUSHFILE' in args:
3044 if 'PUSHFILE' in args:
3042 with open(args['PUSHFILE'], r'rb') as fh:
3045 with open(args['PUSHFILE'], r'rb') as fh:
3043 del args['PUSHFILE']
3046 del args['PUSHFILE']
3044 res, output = peer._callpush(command, fh,
3047 res, output = peer._callpush(command, fh,
3045 **pycompat.strkwargs(args))
3048 **pycompat.strkwargs(args))
3046 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3049 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3047 ui.status(_('remote output: %s\n') %
3050 ui.status(_('remote output: %s\n') %
3048 stringutil.escapestr(output))
3051 stringutil.escapestr(output))
3049 else:
3052 else:
3050 with peer.commandexecutor() as e:
3053 with peer.commandexecutor() as e:
3051 res = e.callcommand(command, args).result()
3054 res = e.callcommand(command, args).result()
3052
3055
3053 if isinstance(res, wireprotov2peer.commandresponse):
3056 if isinstance(res, wireprotov2peer.commandresponse):
3054 val = list(res.cborobjects())
3057 val = list(res.cborobjects())
3055 ui.status(_('response: %s\n') %
3058 ui.status(_('response: %s\n') %
3056 stringutil.pprint(val, bprefix=True))
3059 stringutil.pprint(val, bprefix=True))
3057
3060
3058 else:
3061 else:
3059 ui.status(_('response: %s\n') %
3062 ui.status(_('response: %s\n') %
3060 stringutil.pprint(res, bprefix=True))
3063 stringutil.pprint(res, bprefix=True))
3061
3064
3062 elif action == 'batchbegin':
3065 elif action == 'batchbegin':
3063 if batchedcommands is not None:
3066 if batchedcommands is not None:
3064 raise error.Abort(_('nested batchbegin not allowed'))
3067 raise error.Abort(_('nested batchbegin not allowed'))
3065
3068
3066 batchedcommands = []
3069 batchedcommands = []
3067 elif action == 'batchsubmit':
3070 elif action == 'batchsubmit':
3068 # There is a batching API we could go through. But it would be
3071 # There is a batching API we could go through. But it would be
3069 # difficult to normalize requests into function calls. It is easier
3072 # difficult to normalize requests into function calls. It is easier
3070 # to bypass this layer and normalize to commands + args.
3073 # to bypass this layer and normalize to commands + args.
3071 ui.status(_('sending batch with %d sub-commands\n') %
3074 ui.status(_('sending batch with %d sub-commands\n') %
3072 len(batchedcommands))
3075 len(batchedcommands))
3073 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3076 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3074 ui.status(_('response #%d: %s\n') %
3077 ui.status(_('response #%d: %s\n') %
3075 (i, stringutil.escapestr(chunk)))
3078 (i, stringutil.escapestr(chunk)))
3076
3079
3077 batchedcommands = None
3080 batchedcommands = None
3078
3081
3079 elif action.startswith('httprequest '):
3082 elif action.startswith('httprequest '):
3080 if not opener:
3083 if not opener:
3081 raise error.Abort(_('cannot use httprequest without an HTTP '
3084 raise error.Abort(_('cannot use httprequest without an HTTP '
3082 'peer'))
3085 'peer'))
3083
3086
3084 request = action.split(' ', 2)
3087 request = action.split(' ', 2)
3085 if len(request) != 3:
3088 if len(request) != 3:
3086 raise error.Abort(_('invalid httprequest: expected format is '
3089 raise error.Abort(_('invalid httprequest: expected format is '
3087 '"httprequest <method> <path>'))
3090 '"httprequest <method> <path>'))
3088
3091
3089 method, httppath = request[1:]
3092 method, httppath = request[1:]
3090 headers = {}
3093 headers = {}
3091 body = None
3094 body = None
3092 frames = []
3095 frames = []
3093 for line in lines:
3096 for line in lines:
3094 line = line.lstrip()
3097 line = line.lstrip()
3095 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3098 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3096 if m:
3099 if m:
3097 headers[m.group(1)] = m.group(2)
3100 headers[m.group(1)] = m.group(2)
3098 continue
3101 continue
3099
3102
3100 if line.startswith(b'BODYFILE '):
3103 if line.startswith(b'BODYFILE '):
3101 with open(line.split(b' ', 1)[1], 'rb') as fh:
3104 with open(line.split(b' ', 1)[1], 'rb') as fh:
3102 body = fh.read()
3105 body = fh.read()
3103 elif line.startswith(b'frame '):
3106 elif line.startswith(b'frame '):
3104 frame = wireprotoframing.makeframefromhumanstring(
3107 frame = wireprotoframing.makeframefromhumanstring(
3105 line[len(b'frame '):])
3108 line[len(b'frame '):])
3106
3109
3107 frames.append(frame)
3110 frames.append(frame)
3108 else:
3111 else:
3109 raise error.Abort(_('unknown argument to httprequest: %s') %
3112 raise error.Abort(_('unknown argument to httprequest: %s') %
3110 line)
3113 line)
3111
3114
3112 url = path + httppath
3115 url = path + httppath
3113
3116
3114 if frames:
3117 if frames:
3115 body = b''.join(bytes(f) for f in frames)
3118 body = b''.join(bytes(f) for f in frames)
3116
3119
3117 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3120 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3118
3121
3119 # urllib.Request insists on using has_data() as a proxy for
3122 # urllib.Request insists on using has_data() as a proxy for
3120 # determining the request method. Override that to use our
3123 # determining the request method. Override that to use our
3121 # explicitly requested method.
3124 # explicitly requested method.
3122 req.get_method = lambda: method
3125 req.get_method = lambda: method
3123
3126
3124 try:
3127 try:
3125 res = opener.open(req)
3128 res = opener.open(req)
3126 body = res.read()
3129 body = res.read()
3127 except util.urlerr.urlerror as e:
3130 except util.urlerr.urlerror as e:
3128 e.read()
3131 e.read()
3129 continue
3132 continue
3130
3133
3131 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3134 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3132 ui.write(_('cbor> %s\n') %
3135 ui.write(_('cbor> %s\n') %
3133 stringutil.pprint(cbor.loads(body), bprefix=True))
3136 stringutil.pprint(cbor.loads(body), bprefix=True))
3134
3137
3135 elif action == 'close':
3138 elif action == 'close':
3136 peer.close()
3139 peer.close()
3137 elif action == 'readavailable':
3140 elif action == 'readavailable':
3138 if not stdout or not stderr:
3141 if not stdout or not stderr:
3139 raise error.Abort(_('readavailable not available on this peer'))
3142 raise error.Abort(_('readavailable not available on this peer'))
3140
3143
3141 stdin.close()
3144 stdin.close()
3142 stdout.read()
3145 stdout.read()
3143 stderr.read()
3146 stderr.read()
3144
3147
3145 elif action == 'readline':
3148 elif action == 'readline':
3146 if not stdout:
3149 if not stdout:
3147 raise error.Abort(_('readline not available on this peer'))
3150 raise error.Abort(_('readline not available on this peer'))
3148 stdout.readline()
3151 stdout.readline()
3149 elif action == 'ereadline':
3152 elif action == 'ereadline':
3150 if not stderr:
3153 if not stderr:
3151 raise error.Abort(_('ereadline not available on this peer'))
3154 raise error.Abort(_('ereadline not available on this peer'))
3152 stderr.readline()
3155 stderr.readline()
3153 elif action.startswith('read '):
3156 elif action.startswith('read '):
3154 count = int(action.split(' ', 1)[1])
3157 count = int(action.split(' ', 1)[1])
3155 if not stdout:
3158 if not stdout:
3156 raise error.Abort(_('read not available on this peer'))
3159 raise error.Abort(_('read not available on this peer'))
3157 stdout.read(count)
3160 stdout.read(count)
3158 elif action.startswith('eread '):
3161 elif action.startswith('eread '):
3159 count = int(action.split(' ', 1)[1])
3162 count = int(action.split(' ', 1)[1])
3160 if not stderr:
3163 if not stderr:
3161 raise error.Abort(_('eread not available on this peer'))
3164 raise error.Abort(_('eread not available on this peer'))
3162 stderr.read(count)
3165 stderr.read(count)
3163 else:
3166 else:
3164 raise error.Abort(_('unknown action: %s') % action)
3167 raise error.Abort(_('unknown action: %s') % action)
3165
3168
3166 if batchedcommands is not None:
3169 if batchedcommands is not None:
3167 raise error.Abort(_('unclosed "batchbegin" request'))
3170 raise error.Abort(_('unclosed "batchbegin" request'))
3168
3171
3169 if peer:
3172 if peer:
3170 peer.close()
3173 peer.close()
3171
3174
3172 if proc:
3175 if proc:
3173 proc.kill()
3176 proc.kill()
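
# A hypothetical end-to-end session for the command above (flags and script
# are illustrative, not taken from this changeset's tests):
#
#   $ hg debugwireproto --localssh --peer ssh2 << EOF
#   > command heads
#   > EOF
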
@@ -1,439 +1,519 b''
1 $ cat << EOF >> $HGRCPATH
1 $ cat << EOF >> $HGRCPATH
2 > [ui]
2 > [ui]
3 > interactive=yes
3 > interactive=yes
4 > EOF
4 > EOF
5
5
6 $ hg init debugrevlog
6 $ hg init debugrevlog
7 $ cd debugrevlog
7 $ cd debugrevlog
8 $ echo a > a
8 $ echo a > a
9 $ hg ci -Am adda
9 $ hg ci -Am adda
10 adding a
10 adding a
11 $ hg rm .
12 removing a
13 $ hg ci -Am make-it-empty
14 $ hg revert --all -r 0
15 adding a
16 $ hg ci -Am make-it-full
11 #if reporevlogstore
17 #if reporevlogstore
12 $ hg debugrevlog -m
18 $ hg debugrevlog -m
13 format : 1
19 format : 1
14 flags : inline, generaldelta
20 flags : inline, generaldelta
15
21
16 revisions : 1
22 revisions : 3
17 merges : 0 ( 0.00%)
23 merges : 0 ( 0.00%)
18 normal : 1 (100.00%)
24 normal : 3 (100.00%)
19 revisions : 1
25 revisions : 3
20 full : 1 (100.00%)
26 full : 3 (100.00%)
21 deltas : 0 ( 0.00%)
27 deltas : 0 ( 0.00%)
22 revision size : 44
28 revision size : 88
23 full : 44 (100.00%)
29 full : 88 (100.00%)
24 deltas : 0 ( 0.00%)
30 deltas : 0 ( 0.00%)
25
31
26 chunks : 1
32 chunks : 3
27 0x75 (u) : 1 (100.00%)
33 empty : 1 (33.33%)
28 chunks size : 44
34 0x75 (u) : 2 (66.67%)
29 0x75 (u) : 44 (100.00%)
35 chunks size : 88
36 empty : 0 ( 0.00%)
37 0x75 (u) : 88 (100.00%)
30
38
31 avg chain length : 0
39 avg chain length : 0
32 max chain length : 0
40 max chain length : 0
33 max chain reach : 44
41 max chain reach : 44
34 compression ratio : 0
42 compression ratio : 0
35
43
36 uncompressed data size (min/max/avg) : 43 / 43 / 43
44 uncompressed data size (min/max/avg) : 0 / 43 / 28
37 full revision size (min/max/avg) : 44 / 44 / 44
45 full revision size (min/max/avg) : 0 / 44 / 29
38 delta size (min/max/avg) : 0 / 0 / 0
46 delta size (min/max/avg) : 0 / 0 / 0
39 #endif
47 #endif
40
48
41 Test debugindex, with and without the --verbose/--debug flag
49 Test debugindex, with and without the --verbose/--debug flag
42 $ hg debugindex a
50 $ hg debugindex a
43 rev linkrev nodeid p1 p2
51 rev linkrev nodeid p1 p2
44 0 0 b789fdd96dc2 000000000000 000000000000
52 0 0 b789fdd96dc2 000000000000 000000000000
45
53
46 #if no-reposimplestore
54 #if no-reposimplestore
47 $ hg --verbose debugindex a
55 $ hg --verbose debugindex a
48 rev offset length linkrev nodeid p1 p2
56 rev offset length linkrev nodeid p1 p2
49 0 0 3 0 b789fdd96dc2 000000000000 000000000000
57 0 0 3 0 b789fdd96dc2 000000000000 000000000000
50
58
51 $ hg --debug debugindex a
59 $ hg --debug debugindex a
52 rev offset length linkrev nodeid p1 p2
60 rev offset length linkrev nodeid p1 p2
53 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
61 0 0 3 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000
54 #endif
62 #endif
55
63
56 $ hg debugindex -f 1 a
64 $ hg debugindex -f 1 a
57 rev flag size link p1 p2 nodeid
65 rev flag size link p1 p2 nodeid
58 0 0000 2 0 -1 -1 b789fdd96dc2
66 0 0000 2 0 -1 -1 b789fdd96dc2
59
67
60 #if no-reposimplestore
68 #if no-reposimplestore
61 $ hg --verbose debugindex -f 1 a
69 $ hg --verbose debugindex -f 1 a
62 rev flag offset length size link p1 p2 nodeid
70 rev flag offset length size link p1 p2 nodeid
63 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
71 0 0000 0 3 2 0 -1 -1 b789fdd96dc2
64
72
65 $ hg --debug debugindex -f 1 a
73 $ hg --debug debugindex -f 1 a
66 rev flag offset length size link p1 p2 nodeid
74 rev flag offset length size link p1 p2 nodeid
67 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
75 0 0000 0 3 2 0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
68 #endif
76 #endif
69
77
70 debugdelta chain basic output
78 debugdelta chain basic output
71
79
72 #if reporevlogstore
80 #if reporevlogstore
73 $ hg debugdeltachain -m
81 $ hg debugdeltachain -m
74 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
82 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio
75 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
83 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000
84 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000
85 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000
76
86
77 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
87 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n'
78 0 1 1
88 0 1 1
89 1 2 1
90 2 3 1
79
91
80 $ hg debugdeltachain -m -Tjson
92 $ hg debugdeltachain -m -Tjson
81 [
93 [
82 {
94 {
83 "chainid": 1,
95 "chainid": 1,
84 "chainlen": 1,
96 "chainlen": 1,
85 "chainratio": 1.02325581395,
97 "chainratio": 1.02325581395,
86 "chainsize": 44,
98 "chainsize": 44,
87 "compsize": 44,
99 "compsize": 44,
88 "deltatype": "base",
100 "deltatype": "base",
89 "extradist": 0,
101 "extradist": 0,
90 "extraratio": 0.0,
102 "extraratio": 0.0,
91 "lindist": 44,
103 "lindist": 44,
92 "prevrev": -1,
104 "prevrev": -1,
93 "rev": 0,
105 "rev": 0,
94 "uncompsize": 43
106 "uncompsize": 43
107 },
108 {
109 "chainid": 2,
110 "chainlen": 1,
111 "chainratio": 0,
112 "chainsize": 0,
113 "compsize": 0,
114 "deltatype": "base",
115 "extradist": 0,
116 "extraratio": 0,
117 "lindist": 0,
118 "prevrev": -1,
119 "rev": 1,
120 "uncompsize": 0
121 },
122 {
123 "chainid": 3,
124 "chainlen": 1,
125 "chainratio": 1.02325581395,
126 "chainsize": 44,
127 "compsize": 44,
128 "deltatype": "base",
129 "extradist": 0,
130 "extraratio": 0.0,
131 "lindist": 44,
132 "prevrev": -1,
133 "rev": 2,
134 "uncompsize": 43
95 }
135 }
96 ]
136 ]
97
137
98 debugdelta chain with sparse read enabled
138 debugdelta chain with sparse read enabled
99
139
100 $ cat >> $HGRCPATH <<EOF
140 $ cat >> $HGRCPATH <<EOF
101 > [experimental]
141 > [experimental]
102 > sparse-read = True
142 > sparse-read = True
103 > EOF
143 > EOF
104 $ hg debugdeltachain -m
144 $ hg debugdeltachain -m
105 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
145 rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio readsize largestblk rddensity srchunks
106 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
146 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
147 1 2 1 -1 base 0 0 0 0.00000 0 0 0.00000 0 0 1.00000 1
148 2 3 1 -1 base 44 43 44 1.02326 44 0 0.00000 44 44 1.00000 1
107
149
108 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
150 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {readsize} {largestblock} {readdensity}\n'
109 0 1 1 44 44 1.0
151 0 1 1 44 44 1.0
152 1 2 1 0 0 1
153 2 3 1 44 44 1.0
110
154
111 $ hg debugdeltachain -m -Tjson
155 $ hg debugdeltachain -m -Tjson
112 [
156 [
113 {
157 {
114 "chainid": 1,
158 "chainid": 1,
115 "chainlen": 1,
159 "chainlen": 1,
116 "chainratio": 1.02325581395,
160 "chainratio": 1.02325581395,
117 "chainsize": 44,
161 "chainsize": 44,
118 "compsize": 44,
162 "compsize": 44,
119 "deltatype": "base",
163 "deltatype": "base",
120 "extradist": 0,
164 "extradist": 0,
121 "extraratio": 0.0,
165 "extraratio": 0.0,
122 "largestblock": 44,
166 "largestblock": 44,
123 "lindist": 44,
167 "lindist": 44,
124 "prevrev": -1,
168 "prevrev": -1,
125 "readdensity": 1.0,
169 "readdensity": 1.0,
126 "readsize": 44,
170 "readsize": 44,
127 "rev": 0,
171 "rev": 0,
128 "srchunks": 1,
172 "srchunks": 1,
129 "uncompsize": 43
173 "uncompsize": 43
174 },
175 {
176 "chainid": 2,
177 "chainlen": 1,
178 "chainratio": 0,
179 "chainsize": 0,
180 "compsize": 0,
181 "deltatype": "base",
182 "extradist": 0,
183 "extraratio": 0,
184 "largestblock": 0,
185 "lindist": 0,
186 "prevrev": -1,
187 "readdensity": 1,
188 "readsize": 0,
189 "rev": 1,
190 "srchunks": 1,
191 "uncompsize": 0
192 },
193 {
194 "chainid": 3,
195 "chainlen": 1,
196 "chainratio": 1.02325581395,
197 "chainsize": 44,
198 "compsize": 44,
199 "deltatype": "base",
200 "extradist": 0,
201 "extraratio": 0.0,
202 "largestblock": 44,
203 "lindist": 44,
204 "prevrev": -1,
205 "readdensity": 1.0,
206 "readsize": 44,
207 "rev": 2,
208 "srchunks": 1,
209 "uncompsize": 43
130 }
210 }
131 ]
211 ]
132
212
133 $ printf "This test checks things.\n" >> a
213 $ printf "This test checks things.\n" >> a
134 $ hg ci -m a
214 $ hg ci -m a
135 $ hg branch other
215 $ hg branch other
136 marked working directory as branch other
216 marked working directory as branch other
137 (branches are permanent and global, did you want a bookmark?)
217 (branches are permanent and global, did you want a bookmark?)
138 $ for i in `$TESTDIR/seq.py 5`; do
218 $ for i in `$TESTDIR/seq.py 5`; do
139 > printf "shorter ${i}" >> a
219 > printf "shorter ${i}" >> a
140 > hg ci -m "a other:$i"
220 > hg ci -m "a other:$i"
141 > hg up -q default
221 > hg up -q default
142 > printf "for the branch default we want longer chains: ${i}" >> a
222 > printf "for the branch default we want longer chains: ${i}" >> a
143 > hg ci -m "a default:$i"
223 > hg ci -m "a default:$i"
144 > hg up -q other
224 > hg up -q other
145 > done
225 > done
146 $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
226 $ hg debugdeltachain a -T '{rev} {srchunks}\n' \
147 > --config experimental.sparse-read.density-threshold=0.50 \
227 > --config experimental.sparse-read.density-threshold=0.50 \
148 > --config experimental.sparse-read.min-gap-size=0
228 > --config experimental.sparse-read.min-gap-size=0
149 0 1
229 0 1
150 1 1
230 1 1
151 2 1
231 2 1
152 3 1
232 3 1
153 4 1
233 4 1
154 5 1
234 5 1
155 6 1
235 6 1
156 7 1
236 7 1
157 8 1
237 8 1
158 9 1
238 9 1
159 10 2
239 10 2
160 11 1
240 11 1
161 $ hg --config extensions.strip= strip --no-backup -r 1
241 $ hg --config extensions.strip= strip --no-backup -r 1
162 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
242 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
163
243
164 Test max chain len
244 Test max chain len
165 $ cat >> $HGRCPATH << EOF
245 $ cat >> $HGRCPATH << EOF
166 > [format]
246 > [format]
167 > maxchainlen=4
247 > maxchainlen=4
168 > EOF
248 > EOF
169
249
170 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
250 $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d <file>.\n" >> a
171 $ hg ci -m a
251 $ hg ci -m a
172 $ printf "b\n" >> a
252 $ printf "b\n" >> a
173 $ hg ci -m a
253 $ hg ci -m a
174 $ printf "c\n" >> a
254 $ printf "c\n" >> a
175 $ hg ci -m a
255 $ hg ci -m a
176 $ printf "d\n" >> a
256 $ printf "d\n" >> a
177 $ hg ci -m a
257 $ hg ci -m a
178 $ printf "e\n" >> a
258 $ printf "e\n" >> a
179 $ hg ci -m a
259 $ hg ci -m a
180 $ printf "f\n" >> a
260 $ printf "f\n" >> a
181 $ hg ci -m a
261 $ hg ci -m a
182 $ printf 'g\n' >> a
262 $ printf 'g\n' >> a
183 $ hg ci -m a
263 $ hg ci -m a
184 $ printf 'h\n' >> a
264 $ printf 'h\n' >> a
185 $ hg ci -m a
265 $ hg ci -m a
186
266
187 $ hg debugrevlog -d a
267 $ hg debugrevlog -d a
188 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
268 # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen
189 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
269 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob)
190 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
270 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob)
191 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
271 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
192 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
272 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
193 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
273 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob)
194 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
274 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob)
195 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
275 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob)
196 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
276 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob)
197 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
277 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob)
198 #endif
278 #endif
199
279
200 Test debuglocks command:
280 Test debuglocks command:
201
281
202 $ hg debuglocks
282 $ hg debuglocks
203 lock: free
283 lock: free
204 wlock: free
284 wlock: free
205
285
206 * Test setting the lock
286 * Test setting the lock
207
287
208 waitlock <file> will wait for the file to be created. If it isn't created in
288 waitlock <file> will wait for the file to be created. If it isn't created in
209 a reasonable amount of time, it displays an error message and returns 1
289 a reasonable amount of time, it displays an error message and returns 1
$ waitlock() {
> start=`date +%s`
> timeout=5
> while [ \( ! -f $1 \) -a \( ! -L $1 \) ]; do
> now=`date +%s`
> if [ "`expr $now - $start`" -gt $timeout ]; then
> echo "timeout: $1 was not created in $timeout seconds"
> return 1
> fi
> sleep 0.1
> done
> }
$ dolock() {
> {
> waitlock .hg/unlock
> rm -f .hg/unlock
> echo y
> } | hg debuglocks "$@" > /dev/null
> }
$ dolock -s &
$ waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: free
[1]
$ touch .hg/unlock
$ wait
$ [ -f .hg/store/lock ] || echo "There is no lock"
There is no lock

* Test setting the wlock

$ dolock -S &
$ waitlock .hg/wlock

$ hg debuglocks
lock: free
wlock: user *, process * (*s) (glob)
[1]
$ touch .hg/unlock
$ wait
$ [ -f .hg/wlock ] || echo "There is no wlock"
There is no wlock

* Test setting both locks

$ dolock -Ss &
$ waitlock .hg/wlock && waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: user *, process * (*s) (glob)
[2]

* Test failing to set a lock

$ hg debuglocks -s
abort: lock is already held
[255]

$ hg debuglocks -S
abort: wlock is already held
[255]

$ touch .hg/unlock
$ wait

$ hg debuglocks
lock: free
wlock: free

* Test forcing the lock

$ dolock -s &
$ waitlock .hg/store/lock

$ hg debuglocks
lock: user *, process * (*s) (glob)
wlock: free
[1]

$ hg debuglocks -L

$ hg debuglocks
lock: free
wlock: free

$ touch .hg/unlock
$ wait

* Test forcing the wlock

$ dolock -S &
$ waitlock .hg/wlock

$ hg debuglocks
lock: free
wlock: user *, process * (*s) (glob)
[1]

$ hg debuglocks -W

$ hg debuglocks
lock: free
wlock: free

$ touch .hg/unlock
$ wait
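
A side note, not part of the original test: the two locks exercised above live in
the repository as .hg/store/lock (the store lock, guarding changelog, manifest and
filelogs) and .hg/wlock (the working-directory lock, guarding the dirstate). From
the Python API they are taken with repo.lock() and repo.wlock(), with the wlock
conventionally acquired first. A minimal sketch, assuming the mercurial package is
importable and the current directory is a repository:

# sketch only: hold both locks briefly, wlock before the store lock
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
with repo.wlock():      # creates .hg/wlock while held
    with repo.lock():   # creates .hg/store/lock while held
        pass            # both locks held here; released on exiting the blocks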

Test WdirUnsupported exception

$ hg debugdata -c ffffffffffffffffffffffffffffffffffffffff
abort: working directory revision cannot be specified
[255]
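
A side note, not part of the original test: the all-'f' hash above is Mercurial's
reserved node id for the working directory (mercurial.node.wdirid), and debugdata
reads stored revision data, hence the abort. A quick check, assuming the mercurial
package is importable:

from mercurial.node import hex, wdirid

# the reserved working-directory node is forty 'f' characters
assert hex(wdirid) == b'f' * 40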

Test cache warming command

$ rm -rf .hg/cache/
$ hg debugupdatecaches --debug
updating the branch cache
$ ls -r .hg/cache/*
.hg/cache/rbc-revs-v1
.hg/cache/rbc-names-v1
.hg/cache/branch2-served

Test debugcolor

#if no-windows
$ hg debugcolor --style --color always | egrep 'mode|style|log\.'
color mode: 'ansi'
available style:
\x1b[0;33mlog.changeset\x1b[0m: \x1b[0;33myellow\x1b[0m (esc)
#endif

$ hg debugcolor --style --color never
color mode: None
available style:

$ cd ..

Test internal debugstacktrace command

$ cat > debugstacktrace.py << EOF
> from __future__ import absolute_import
> import sys
> from mercurial import util
> def f():
>     util.debugstacktrace(f=sys.stdout)
>     g()
> def g():
>     util.dst('hello from g\\n', skip=1)
>     h()
> def h():
>     util.dst('hi ...\\nfrom h hidden in g', 1, depth=2)
> f()
> EOF
$ $PYTHON debugstacktrace.py
stacktrace at:
debugstacktrace.py:12 in * (glob)
debugstacktrace.py:5 in f
hello from g at:
debugstacktrace.py:12 in * (glob)
debugstacktrace.py:6 in f
hi ...
from h hidden in g at:
debugstacktrace.py:6 in f
debugstacktrace.py:9 in g
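
A side note, not part of the original test: util.debugstacktrace (util.dst is an
alias) writes the caller's stack to the given file object, outermost frame first;
skip drops that many frames closest to the call, and a non-zero depth keeps only
that many of the remaining innermost frames. A rough standalone approximation
using the standard traceback module; the sketchstacktrace and demo names are made
up and the formatting is not Mercurial's:

# illustrative approximation of debugstacktrace's behaviour (not Mercurial's code)
from __future__ import absolute_import
import sys
import traceback

def sketchstacktrace(msg='stacktrace', skip=0, f=sys.stderr, depth=0):
    # frames come back oldest first; drop this helper's own frame plus
    # the 'skip' frames closest to the caller
    frames = traceback.extract_stack()[:-(1 + skip)]
    if depth:
        frames = frames[-depth:]  # keep only the innermost 'depth' frames
    f.write('%s at:\n' % msg)
    for filename, lineno, funcname, _source in frames:
        f.write(' %s:%d in %s\n' % (filename, lineno, funcname))

def demo():
    sketchstacktrace('hello', f=sys.stdout)

demo()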

Test debugcapabilities command:

$ hg debugcapabilities ./debugrevlog/
Main capabilities:
  branchmap
  $USUAL_BUNDLE2_CAPS$
  getbundle
  known
  lookup
  pushkey
  unbundle
Bundle2 capabilities:
  HG20
  bookmarks
  changegroup
    01
    02
  digests
    md5
    sha1
    sha512
  error
    abort
    unsupportedcontent
    pushraced
    pushkey
  hgtagsfnodes
  listkeys
  phases
    heads
  pushkey
  remote-changegroup
    http
    https
  rev-branch-cache
  stream
    v2

Test debugpeer

$ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" debugpeer ssh://user@dummy/debugrevlog
url: ssh://user@dummy/debugrevlog
local: no
pushable: yes

$ hg --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" --debug debugpeer ssh://user@dummy/debugrevlog
running "*" "*/tests/dummyssh" 'user@dummy' 'hg -R debugrevlog serve --stdio' (glob) (no-windows !)
running "*" "*\tests/dummyssh" "user@dummy" "hg -R debugrevlog serve --stdio" (glob) (windows !)
devel-peer-request: hello+between
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
remote: 413
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS_SERVER$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1 unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
devel-peer-request: caps: * bytes (glob)
sending protocaps command
url: ssh://user@dummy/debugrevlog
local: no
pushable: yes
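
A side note, not part of the original test: debugpeer opens a connection to the
given URL and reports whether the resulting peer is local and whether it accepts
pushes. A rough sketch of gathering the same information from the API, assuming
the mercurial package is importable; the peerinfo name is made up and error
handling is omitted:

from mercurial import hg, ui as uimod

def peerinfo(url):
    # open a peer and summarize roughly what debugpeer reports
    peer = hg.peer(uimod.ui.load(), {}, url)
    try:
        return {
            'url': peer.url(),
            'local': peer.local() is not None,
            'pushable': peer.canpush(),
        }
    finally:
        peer.close()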