debugcommands: support connecting to HTTP peers...
Gregory Szorc
r37030:fc893982 default
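
The visible hunk below adds an import of the httppeer module to debugcommands.py, letting the debug commands reach repositories served over HTTP. As a hedged illustration only (not part of the changeset; the URL is a placeholder), hg.peer() dispatches on the URL scheme, so an http(s) path yields an httppeer instance whose capabilities can be listed much like debugcapabilities does further down:

    # minimal sketch, assuming a reachable HTTP repository at the placeholder URL
    from mercurial import hg, ui as uimod

    ui = uimod.ui.load()
    peer = hg.peer(ui, {}, b'https://example.com/repo')  # http(s) scheme -> httppeer instance
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
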
@@ -1,2891 +1,2950 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import stat
19 import stat
20 import string
20 import string
21 import subprocess
21 import subprocess
22 import sys
22 import sys
23 import tempfile
23 import tempfile
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 dagparser,
41 dagparser,
42 dagutil,
42 dagutil,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 fileset,
48 fileset,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 localrepo,
52 localrepo,
52 lock as lockmod,
53 lock as lockmod,
53 logcmdutil,
54 logcmdutil,
54 merge as mergemod,
55 merge as mergemod,
55 obsolete,
56 obsolete,
56 obsutil,
57 obsutil,
57 phases,
58 phases,
58 policy,
59 policy,
59 pvec,
60 pvec,
60 pycompat,
61 pycompat,
61 registrar,
62 registrar,
62 repair,
63 repair,
63 revlog,
64 revlog,
64 revset,
65 revset,
65 revsetlang,
66 revsetlang,
66 scmutil,
67 scmutil,
67 setdiscovery,
68 setdiscovery,
68 simplemerge,
69 simplemerge,
69 smartset,
70 smartset,
70 sshpeer,
71 sshpeer,
71 sslutil,
72 sslutil,
72 streamclone,
73 streamclone,
73 templater,
74 templater,
74 treediscovery,
75 treediscovery,
75 upgrade,
76 upgrade,
76 url as urlmod,
77 url as urlmod,
77 util,
78 util,
78 vfs as vfsmod,
79 vfs as vfsmod,
79 wireprotoserver,
80 wireprotoserver,
80 )
81 )
81 from .utils import dateutil
82 from .utils import dateutil
82
83
83 release = lockmod.release
84 release = lockmod.release
84
85
85 command = registrar.command()
86 command = registrar.command()
86
87
87 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
88 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
88 def debugancestor(ui, repo, *args):
89 def debugancestor(ui, repo, *args):
89 """find the ancestor revision of two revisions in a given index"""
90 """find the ancestor revision of two revisions in a given index"""
90 if len(args) == 3:
91 if len(args) == 3:
91 index, rev1, rev2 = args
92 index, rev1, rev2 = args
92 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
93 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
93 lookup = r.lookup
94 lookup = r.lookup
94 elif len(args) == 2:
95 elif len(args) == 2:
95 if not repo:
96 if not repo:
96 raise error.Abort(_('there is no Mercurial repository here '
97 raise error.Abort(_('there is no Mercurial repository here '
97 '(.hg not found)'))
98 '(.hg not found)'))
98 rev1, rev2 = args
99 rev1, rev2 = args
99 r = repo.changelog
100 r = repo.changelog
100 lookup = repo.lookup
101 lookup = repo.lookup
101 else:
102 else:
102 raise error.Abort(_('either two or three arguments required'))
103 raise error.Abort(_('either two or three arguments required'))
103 a = r.ancestor(lookup(rev1), lookup(rev2))
104 a = r.ancestor(lookup(rev1), lookup(rev2))
104 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
105 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
105
106
106 @command('debugapplystreamclonebundle', [], 'FILE')
107 @command('debugapplystreamclonebundle', [], 'FILE')
107 def debugapplystreamclonebundle(ui, repo, fname):
108 def debugapplystreamclonebundle(ui, repo, fname):
108 """apply a stream clone bundle file"""
109 """apply a stream clone bundle file"""
109 f = hg.openpath(ui, fname)
110 f = hg.openpath(ui, fname)
110 gen = exchange.readbundle(ui, f, fname)
111 gen = exchange.readbundle(ui, f, fname)
111 gen.apply(repo)
112 gen.apply(repo)
112
113
113 @command('debugbuilddag',
114 @command('debugbuilddag',
114 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
115 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
115 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
116 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
116 ('n', 'new-file', None, _('add new file at each rev'))],
117 ('n', 'new-file', None, _('add new file at each rev'))],
117 _('[OPTION]... [TEXT]'))
118 _('[OPTION]... [TEXT]'))
118 def debugbuilddag(ui, repo, text=None,
119 def debugbuilddag(ui, repo, text=None,
119 mergeable_file=False,
120 mergeable_file=False,
120 overwritten_file=False,
121 overwritten_file=False,
121 new_file=False):
122 new_file=False):
122 """builds a repo with a given DAG from scratch in the current empty repo
123 """builds a repo with a given DAG from scratch in the current empty repo
123
124
124 The description of the DAG is read from stdin if not given on the
125 The description of the DAG is read from stdin if not given on the
125 command line.
126 command line.
126
127
127 Elements:
128 Elements:
128
129
129 - "+n" is a linear run of n nodes based on the current default parent
130 - "+n" is a linear run of n nodes based on the current default parent
130 - "." is a single node based on the current default parent
131 - "." is a single node based on the current default parent
131 - "$" resets the default parent to null (implied at the start);
132 - "$" resets the default parent to null (implied at the start);
132 otherwise the default parent is always the last node created
133 otherwise the default parent is always the last node created
133 - "<p" sets the default parent to the backref p
134 - "<p" sets the default parent to the backref p
134 - "*p" is a fork at parent p, which is a backref
135 - "*p" is a fork at parent p, which is a backref
135 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
136 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
136 - "/p2" is a merge of the preceding node and p2
137 - "/p2" is a merge of the preceding node and p2
137 - ":tag" defines a local tag for the preceding node
138 - ":tag" defines a local tag for the preceding node
138 - "@branch" sets the named branch for subsequent nodes
139 - "@branch" sets the named branch for subsequent nodes
139 - "#...\\n" is a comment up to the end of the line
140 - "#...\\n" is a comment up to the end of the line
140
141
141 Whitespace between the above elements is ignored.
142 Whitespace between the above elements is ignored.
142
143
143 A backref is either
144 A backref is either
144
145
145 - a number n, which references the node curr-n, where curr is the current
146 - a number n, which references the node curr-n, where curr is the current
146 node, or
147 node, or
147 - the name of a local tag you placed earlier using ":tag", or
148 - the name of a local tag you placed earlier using ":tag", or
148 - empty to denote the default parent.
149 - empty to denote the default parent.
149
150
150 All string valued-elements are either strictly alphanumeric, or must
151 All string valued-elements are either strictly alphanumeric, or must
151 be enclosed in double quotes ("..."), with "\\" as escape character.
152 be enclosed in double quotes ("..."), with "\\" as escape character.
152 """
153 """
153
154
154 if text is None:
155 if text is None:
155 ui.status(_("reading DAG from stdin\n"))
156 ui.status(_("reading DAG from stdin\n"))
156 text = ui.fin.read()
157 text = ui.fin.read()
157
158
158 cl = repo.changelog
159 cl = repo.changelog
159 if len(cl) > 0:
160 if len(cl) > 0:
160 raise error.Abort(_('repository is not empty'))
161 raise error.Abort(_('repository is not empty'))
161
162
162 # determine number of revs in DAG
163 # determine number of revs in DAG
163 total = 0
164 total = 0
164 for type, data in dagparser.parsedag(text):
165 for type, data in dagparser.parsedag(text):
165 if type == 'n':
166 if type == 'n':
166 total += 1
167 total += 1
167
168
168 if mergeable_file:
169 if mergeable_file:
169 linesperrev = 2
170 linesperrev = 2
170 # make a file with k lines per rev
171 # make a file with k lines per rev
171 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
172 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
172 initialmergedlines.append("")
173 initialmergedlines.append("")
173
174
174 tags = []
175 tags = []
175
176
176 wlock = lock = tr = None
177 wlock = lock = tr = None
177 try:
178 try:
178 wlock = repo.wlock()
179 wlock = repo.wlock()
179 lock = repo.lock()
180 lock = repo.lock()
180 tr = repo.transaction("builddag")
181 tr = repo.transaction("builddag")
181
182
182 at = -1
183 at = -1
183 atbranch = 'default'
184 atbranch = 'default'
184 nodeids = []
185 nodeids = []
185 id = 0
186 id = 0
186 ui.progress(_('building'), id, unit=_('revisions'), total=total)
187 ui.progress(_('building'), id, unit=_('revisions'), total=total)
187 for type, data in dagparser.parsedag(text):
188 for type, data in dagparser.parsedag(text):
188 if type == 'n':
189 if type == 'n':
189 ui.note(('node %s\n' % pycompat.bytestr(data)))
190 ui.note(('node %s\n' % pycompat.bytestr(data)))
190 id, ps = data
191 id, ps = data
191
192
192 files = []
193 files = []
193 filecontent = {}
194 filecontent = {}
194
195
195 p2 = None
196 p2 = None
196 if mergeable_file:
197 if mergeable_file:
197 fn = "mf"
198 fn = "mf"
198 p1 = repo[ps[0]]
199 p1 = repo[ps[0]]
199 if len(ps) > 1:
200 if len(ps) > 1:
200 p2 = repo[ps[1]]
201 p2 = repo[ps[1]]
201 pa = p1.ancestor(p2)
202 pa = p1.ancestor(p2)
202 base, local, other = [x[fn].data() for x in (pa, p1,
203 base, local, other = [x[fn].data() for x in (pa, p1,
203 p2)]
204 p2)]
204 m3 = simplemerge.Merge3Text(base, local, other)
205 m3 = simplemerge.Merge3Text(base, local, other)
205 ml = [l.strip() for l in m3.merge_lines()]
206 ml = [l.strip() for l in m3.merge_lines()]
206 ml.append("")
207 ml.append("")
207 elif at > 0:
208 elif at > 0:
208 ml = p1[fn].data().split("\n")
209 ml = p1[fn].data().split("\n")
209 else:
210 else:
210 ml = initialmergedlines
211 ml = initialmergedlines
211 ml[id * linesperrev] += " r%i" % id
212 ml[id * linesperrev] += " r%i" % id
212 mergedtext = "\n".join(ml)
213 mergedtext = "\n".join(ml)
213 files.append(fn)
214 files.append(fn)
214 filecontent[fn] = mergedtext
215 filecontent[fn] = mergedtext
215
216
216 if overwritten_file:
217 if overwritten_file:
217 fn = "of"
218 fn = "of"
218 files.append(fn)
219 files.append(fn)
219 filecontent[fn] = "r%i\n" % id
220 filecontent[fn] = "r%i\n" % id
220
221
221 if new_file:
222 if new_file:
222 fn = "nf%i" % id
223 fn = "nf%i" % id
223 files.append(fn)
224 files.append(fn)
224 filecontent[fn] = "r%i\n" % id
225 filecontent[fn] = "r%i\n" % id
225 if len(ps) > 1:
226 if len(ps) > 1:
226 if not p2:
227 if not p2:
227 p2 = repo[ps[1]]
228 p2 = repo[ps[1]]
228 for fn in p2:
229 for fn in p2:
229 if fn.startswith("nf"):
230 if fn.startswith("nf"):
230 files.append(fn)
231 files.append(fn)
231 filecontent[fn] = p2[fn].data()
232 filecontent[fn] = p2[fn].data()
232
233
233 def fctxfn(repo, cx, path):
234 def fctxfn(repo, cx, path):
234 if path in filecontent:
235 if path in filecontent:
235 return context.memfilectx(repo, cx, path,
236 return context.memfilectx(repo, cx, path,
236 filecontent[path])
237 filecontent[path])
237 return None
238 return None
238
239
239 if len(ps) == 0 or ps[0] < 0:
240 if len(ps) == 0 or ps[0] < 0:
240 pars = [None, None]
241 pars = [None, None]
241 elif len(ps) == 1:
242 elif len(ps) == 1:
242 pars = [nodeids[ps[0]], None]
243 pars = [nodeids[ps[0]], None]
243 else:
244 else:
244 pars = [nodeids[p] for p in ps]
245 pars = [nodeids[p] for p in ps]
245 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
246 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
246 date=(id, 0),
247 date=(id, 0),
247 user="debugbuilddag",
248 user="debugbuilddag",
248 extra={'branch': atbranch})
249 extra={'branch': atbranch})
249 nodeid = repo.commitctx(cx)
250 nodeid = repo.commitctx(cx)
250 nodeids.append(nodeid)
251 nodeids.append(nodeid)
251 at = id
252 at = id
252 elif type == 'l':
253 elif type == 'l':
253 id, name = data
254 id, name = data
254 ui.note(('tag %s\n' % name))
255 ui.note(('tag %s\n' % name))
255 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
256 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
256 elif type == 'a':
257 elif type == 'a':
257 ui.note(('branch %s\n' % data))
258 ui.note(('branch %s\n' % data))
258 atbranch = data
259 atbranch = data
259 ui.progress(_('building'), id, unit=_('revisions'), total=total)
260 ui.progress(_('building'), id, unit=_('revisions'), total=total)
260 tr.close()
261 tr.close()
261
262
262 if tags:
263 if tags:
263 repo.vfs.write("localtags", "".join(tags))
264 repo.vfs.write("localtags", "".join(tags))
264 finally:
265 finally:
265 ui.progress(_('building'), None)
266 ui.progress(_('building'), None)
266 release(tr, lock, wlock)
267 release(tr, lock, wlock)
267
268
268 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 indent_string = ' ' * indent
270 indent_string = ' ' * indent
270 if all:
271 if all:
271 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 % indent_string)
273 % indent_string)
273
274
274 def showchunks(named):
275 def showchunks(named):
275 ui.write("\n%s%s\n" % (indent_string, named))
276 ui.write("\n%s%s\n" % (indent_string, named))
276 for deltadata in gen.deltaiter():
277 for deltadata in gen.deltaiter():
277 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 ui.write("%s%s %s %s %s %s %d\n" %
279 ui.write("%s%s %s %s %s %s %d\n" %
279 (indent_string, hex(node), hex(p1), hex(p2),
280 (indent_string, hex(node), hex(p1), hex(p2),
280 hex(cs), hex(deltabase), len(delta)))
281 hex(cs), hex(deltabase), len(delta)))
281
282
282 chunkdata = gen.changelogheader()
283 chunkdata = gen.changelogheader()
283 showchunks("changelog")
284 showchunks("changelog")
284 chunkdata = gen.manifestheader()
285 chunkdata = gen.manifestheader()
285 showchunks("manifest")
286 showchunks("manifest")
286 for chunkdata in iter(gen.filelogheader, {}):
287 for chunkdata in iter(gen.filelogheader, {}):
287 fname = chunkdata['filename']
288 fname = chunkdata['filename']
288 showchunks(fname)
289 showchunks(fname)
289 else:
290 else:
290 if isinstance(gen, bundle2.unbundle20):
291 if isinstance(gen, bundle2.unbundle20):
291 raise error.Abort(_('use debugbundle2 for this file'))
292 raise error.Abort(_('use debugbundle2 for this file'))
292 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
293 for deltadata in gen.deltaiter():
294 for deltadata in gen.deltaiter():
294 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 ui.write("%s%s\n" % (indent_string, hex(node)))
296 ui.write("%s%s\n" % (indent_string, hex(node)))
296
297
297 def _debugobsmarkers(ui, part, indent=0, **opts):
298 def _debugobsmarkers(ui, part, indent=0, **opts):
298 """display version and markers contained in 'data'"""
299 """display version and markers contained in 'data'"""
299 opts = pycompat.byteskwargs(opts)
300 opts = pycompat.byteskwargs(opts)
300 data = part.read()
301 data = part.read()
301 indent_string = ' ' * indent
302 indent_string = ' ' * indent
302 try:
303 try:
303 version, markers = obsolete._readmarkers(data)
304 version, markers = obsolete._readmarkers(data)
304 except error.UnknownVersion as exc:
305 except error.UnknownVersion as exc:
305 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg %= indent_string, exc.version, len(data)
307 msg %= indent_string, exc.version, len(data)
307 ui.write(msg)
308 ui.write(msg)
308 else:
309 else:
309 msg = "%sversion: %d (%d bytes)\n"
310 msg = "%sversion: %d (%d bytes)\n"
310 msg %= indent_string, version, len(data)
311 msg %= indent_string, version, len(data)
311 ui.write(msg)
312 ui.write(msg)
312 fm = ui.formatter('debugobsolete', opts)
313 fm = ui.formatter('debugobsolete', opts)
313 for rawmarker in sorted(markers):
314 for rawmarker in sorted(markers):
314 m = obsutil.marker(None, rawmarker)
315 m = obsutil.marker(None, rawmarker)
315 fm.startitem()
316 fm.startitem()
316 fm.plain(indent_string)
317 fm.plain(indent_string)
317 cmdutil.showmarker(fm, m)
318 cmdutil.showmarker(fm, m)
318 fm.end()
319 fm.end()
319
320
320 def _debugphaseheads(ui, data, indent=0):
321 def _debugphaseheads(ui, data, indent=0):
321 """display version and markers contained in 'data'"""
322 """display version and markers contained in 'data'"""
322 indent_string = ' ' * indent
323 indent_string = ' ' * indent
323 headsbyphase = phases.binarydecode(data)
324 headsbyphase = phases.binarydecode(data)
324 for phase in phases.allphases:
325 for phase in phases.allphases:
325 for head in headsbyphase[phase]:
326 for head in headsbyphase[phase]:
326 ui.write(indent_string)
327 ui.write(indent_string)
327 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328
329
329 def _quasirepr(thing):
330 def _quasirepr(thing):
330 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 return '{%s}' % (
332 return '{%s}' % (
332 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 return pycompat.bytestr(repr(thing))
334 return pycompat.bytestr(repr(thing))
334
335
335 def _debugbundle2(ui, gen, all=None, **opts):
336 def _debugbundle2(ui, gen, all=None, **opts):
336 """lists the contents of a bundle2"""
337 """lists the contents of a bundle2"""
337 if not isinstance(gen, bundle2.unbundle20):
338 if not isinstance(gen, bundle2.unbundle20):
338 raise error.Abort(_('not a bundle2 file'))
339 raise error.Abort(_('not a bundle2 file'))
339 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 parttypes = opts.get(r'part_type', [])
341 parttypes = opts.get(r'part_type', [])
341 for part in gen.iterparts():
342 for part in gen.iterparts():
342 if parttypes and part.type not in parttypes:
343 if parttypes and part.type not in parttypes:
343 continue
344 continue
344 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
345 ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
345 if part.type == 'changegroup':
346 if part.type == 'changegroup':
346 version = part.params.get('version', '01')
347 version = part.params.get('version', '01')
347 cg = changegroup.getunbundler(version, part, 'UN')
348 cg = changegroup.getunbundler(version, part, 'UN')
348 if not ui.quiet:
349 if not ui.quiet:
349 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
350 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
350 if part.type == 'obsmarkers':
351 if part.type == 'obsmarkers':
351 if not ui.quiet:
352 if not ui.quiet:
352 _debugobsmarkers(ui, part, indent=4, **opts)
353 _debugobsmarkers(ui, part, indent=4, **opts)
353 if part.type == 'phase-heads':
354 if part.type == 'phase-heads':
354 if not ui.quiet:
355 if not ui.quiet:
355 _debugphaseheads(ui, part, indent=4)
356 _debugphaseheads(ui, part, indent=4)
356
357
357 @command('debugbundle',
358 @command('debugbundle',
358 [('a', 'all', None, _('show all details')),
359 [('a', 'all', None, _('show all details')),
359 ('', 'part-type', [], _('show only the named part type')),
360 ('', 'part-type', [], _('show only the named part type')),
360 ('', 'spec', None, _('print the bundlespec of the bundle'))],
361 ('', 'spec', None, _('print the bundlespec of the bundle'))],
361 _('FILE'),
362 _('FILE'),
362 norepo=True)
363 norepo=True)
363 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
364 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
364 """lists the contents of a bundle"""
365 """lists the contents of a bundle"""
365 with hg.openpath(ui, bundlepath) as f:
366 with hg.openpath(ui, bundlepath) as f:
366 if spec:
367 if spec:
367 spec = exchange.getbundlespec(ui, f)
368 spec = exchange.getbundlespec(ui, f)
368 ui.write('%s\n' % spec)
369 ui.write('%s\n' % spec)
369 return
370 return
370
371
371 gen = exchange.readbundle(ui, f, bundlepath)
372 gen = exchange.readbundle(ui, f, bundlepath)
372 if isinstance(gen, bundle2.unbundle20):
373 if isinstance(gen, bundle2.unbundle20):
373 return _debugbundle2(ui, gen, all=all, **opts)
374 return _debugbundle2(ui, gen, all=all, **opts)
374 _debugchangegroup(ui, gen, all=all, **opts)
375 _debugchangegroup(ui, gen, all=all, **opts)
375
376
376 @command('debugcapabilities',
377 @command('debugcapabilities',
377 [], _('PATH'),
378 [], _('PATH'),
378 norepo=True)
379 norepo=True)
379 def debugcapabilities(ui, path, **opts):
380 def debugcapabilities(ui, path, **opts):
380 """lists the capabilities of a remote peer"""
381 """lists the capabilities of a remote peer"""
381 opts = pycompat.byteskwargs(opts)
382 opts = pycompat.byteskwargs(opts)
382 peer = hg.peer(ui, opts, path)
383 peer = hg.peer(ui, opts, path)
383 caps = peer.capabilities()
384 caps = peer.capabilities()
384 ui.write(('Main capabilities:\n'))
385 ui.write(('Main capabilities:\n'))
385 for c in sorted(caps):
386 for c in sorted(caps):
386 ui.write((' %s\n') % c)
387 ui.write((' %s\n') % c)
387 b2caps = bundle2.bundle2caps(peer)
388 b2caps = bundle2.bundle2caps(peer)
388 if b2caps:
389 if b2caps:
389 ui.write(('Bundle2 capabilities:\n'))
390 ui.write(('Bundle2 capabilities:\n'))
390 for key, values in sorted(b2caps.iteritems()):
391 for key, values in sorted(b2caps.iteritems()):
391 ui.write((' %s\n') % key)
392 ui.write((' %s\n') % key)
392 for v in values:
393 for v in values:
393 ui.write((' %s\n') % v)
394 ui.write((' %s\n') % v)
394
395
395 @command('debugcheckstate', [], '')
396 @command('debugcheckstate', [], '')
396 def debugcheckstate(ui, repo):
397 def debugcheckstate(ui, repo):
397 """validate the correctness of the current dirstate"""
398 """validate the correctness of the current dirstate"""
398 parent1, parent2 = repo.dirstate.parents()
399 parent1, parent2 = repo.dirstate.parents()
399 m1 = repo[parent1].manifest()
400 m1 = repo[parent1].manifest()
400 m2 = repo[parent2].manifest()
401 m2 = repo[parent2].manifest()
401 errors = 0
402 errors = 0
402 for f in repo.dirstate:
403 for f in repo.dirstate:
403 state = repo.dirstate[f]
404 state = repo.dirstate[f]
404 if state in "nr" and f not in m1:
405 if state in "nr" and f not in m1:
405 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
406 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
406 errors += 1
407 errors += 1
407 if state in "a" and f in m1:
408 if state in "a" and f in m1:
408 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
409 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
409 errors += 1
410 errors += 1
410 if state in "m" and f not in m1 and f not in m2:
411 if state in "m" and f not in m1 and f not in m2:
411 ui.warn(_("%s in state %s, but not in either manifest\n") %
412 ui.warn(_("%s in state %s, but not in either manifest\n") %
412 (f, state))
413 (f, state))
413 errors += 1
414 errors += 1
414 for f in m1:
415 for f in m1:
415 state = repo.dirstate[f]
416 state = repo.dirstate[f]
416 if state not in "nrm":
417 if state not in "nrm":
417 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
418 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
418 errors += 1
419 errors += 1
419 if errors:
420 if errors:
420 error = _(".hg/dirstate inconsistent with current parent's manifest")
421 error = _(".hg/dirstate inconsistent with current parent's manifest")
421 raise error.Abort(error)
422 raise error.Abort(error)
422
423
423 @command('debugcolor',
424 @command('debugcolor',
424 [('', 'style', None, _('show all configured styles'))],
425 [('', 'style', None, _('show all configured styles'))],
425 'hg debugcolor')
426 'hg debugcolor')
426 def debugcolor(ui, repo, **opts):
427 def debugcolor(ui, repo, **opts):
427 """show available color, effects or style"""
428 """show available color, effects or style"""
428 ui.write(('color mode: %s\n') % ui._colormode)
429 ui.write(('color mode: %s\n') % ui._colormode)
429 if opts.get(r'style'):
430 if opts.get(r'style'):
430 return _debugdisplaystyle(ui)
431 return _debugdisplaystyle(ui)
431 else:
432 else:
432 return _debugdisplaycolor(ui)
433 return _debugdisplaycolor(ui)
433
434
434 def _debugdisplaycolor(ui):
435 def _debugdisplaycolor(ui):
435 ui = ui.copy()
436 ui = ui.copy()
436 ui._styles.clear()
437 ui._styles.clear()
437 for effect in color._activeeffects(ui).keys():
438 for effect in color._activeeffects(ui).keys():
438 ui._styles[effect] = effect
439 ui._styles[effect] = effect
439 if ui._terminfoparams:
440 if ui._terminfoparams:
440 for k, v in ui.configitems('color'):
441 for k, v in ui.configitems('color'):
441 if k.startswith('color.'):
442 if k.startswith('color.'):
442 ui._styles[k] = k[6:]
443 ui._styles[k] = k[6:]
443 elif k.startswith('terminfo.'):
444 elif k.startswith('terminfo.'):
444 ui._styles[k] = k[9:]
445 ui._styles[k] = k[9:]
445 ui.write(_('available colors:\n'))
446 ui.write(_('available colors:\n'))
446 # sort label with a '_' after the other to group '_background' entry.
447 # sort label with a '_' after the other to group '_background' entry.
447 items = sorted(ui._styles.items(),
448 items = sorted(ui._styles.items(),
448 key=lambda i: ('_' in i[0], i[0], i[1]))
449 key=lambda i: ('_' in i[0], i[0], i[1]))
449 for colorname, label in items:
450 for colorname, label in items:
450 ui.write(('%s\n') % colorname, label=label)
451 ui.write(('%s\n') % colorname, label=label)
451
452
452 def _debugdisplaystyle(ui):
453 def _debugdisplaystyle(ui):
453 ui.write(_('available style:\n'))
454 ui.write(_('available style:\n'))
454 width = max(len(s) for s in ui._styles)
455 width = max(len(s) for s in ui._styles)
455 for label, effects in sorted(ui._styles.items()):
456 for label, effects in sorted(ui._styles.items()):
456 ui.write('%s' % label, label=label)
457 ui.write('%s' % label, label=label)
457 if effects:
458 if effects:
458 # 50
459 # 50
459 ui.write(': ')
460 ui.write(': ')
460 ui.write(' ' * (max(0, width - len(label))))
461 ui.write(' ' * (max(0, width - len(label))))
461 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
462 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
462 ui.write('\n')
463 ui.write('\n')
463
464
464 @command('debugcreatestreamclonebundle', [], 'FILE')
465 @command('debugcreatestreamclonebundle', [], 'FILE')
465 def debugcreatestreamclonebundle(ui, repo, fname):
466 def debugcreatestreamclonebundle(ui, repo, fname):
466 """create a stream clone bundle file
467 """create a stream clone bundle file
467
468
468 Stream bundles are special bundles that are essentially archives of
469 Stream bundles are special bundles that are essentially archives of
469 revlog files. They are commonly used for cloning very quickly.
470 revlog files. They are commonly used for cloning very quickly.
470 """
471 """
471 # TODO we may want to turn this into an abort when this functionality
472 # TODO we may want to turn this into an abort when this functionality
472 # is moved into `hg bundle`.
473 # is moved into `hg bundle`.
473 if phases.hassecret(repo):
474 if phases.hassecret(repo):
474 ui.warn(_('(warning: stream clone bundle will contain secret '
475 ui.warn(_('(warning: stream clone bundle will contain secret '
475 'revisions)\n'))
476 'revisions)\n'))
476
477
477 requirements, gen = streamclone.generatebundlev1(repo)
478 requirements, gen = streamclone.generatebundlev1(repo)
478 changegroup.writechunks(ui, gen, fname)
479 changegroup.writechunks(ui, gen, fname)
479
480
480 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
481 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
481
482
482 @command('debugdag',
483 @command('debugdag',
483 [('t', 'tags', None, _('use tags as labels')),
484 [('t', 'tags', None, _('use tags as labels')),
484 ('b', 'branches', None, _('annotate with branch names')),
485 ('b', 'branches', None, _('annotate with branch names')),
485 ('', 'dots', None, _('use dots for runs')),
486 ('', 'dots', None, _('use dots for runs')),
486 ('s', 'spaces', None, _('separate elements by spaces'))],
487 ('s', 'spaces', None, _('separate elements by spaces'))],
487 _('[OPTION]... [FILE [REV]...]'),
488 _('[OPTION]... [FILE [REV]...]'),
488 optionalrepo=True)
489 optionalrepo=True)
489 def debugdag(ui, repo, file_=None, *revs, **opts):
490 def debugdag(ui, repo, file_=None, *revs, **opts):
490 """format the changelog or an index DAG as a concise textual description
491 """format the changelog or an index DAG as a concise textual description
491
492
492 If you pass a revlog index, the revlog's DAG is emitted. If you list
493 If you pass a revlog index, the revlog's DAG is emitted. If you list
493 revision numbers, they get labeled in the output as rN.
494 revision numbers, they get labeled in the output as rN.
494
495
495 Otherwise, the changelog DAG of the current repo is emitted.
496 Otherwise, the changelog DAG of the current repo is emitted.
496 """
497 """
497 spaces = opts.get(r'spaces')
498 spaces = opts.get(r'spaces')
498 dots = opts.get(r'dots')
499 dots = opts.get(r'dots')
499 if file_:
500 if file_:
500 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
501 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
501 file_)
502 file_)
502 revs = set((int(r) for r in revs))
503 revs = set((int(r) for r in revs))
503 def events():
504 def events():
504 for r in rlog:
505 for r in rlog:
505 yield 'n', (r, list(p for p in rlog.parentrevs(r)
506 yield 'n', (r, list(p for p in rlog.parentrevs(r)
506 if p != -1))
507 if p != -1))
507 if r in revs:
508 if r in revs:
508 yield 'l', (r, "r%i" % r)
509 yield 'l', (r, "r%i" % r)
509 elif repo:
510 elif repo:
510 cl = repo.changelog
511 cl = repo.changelog
511 tags = opts.get(r'tags')
512 tags = opts.get(r'tags')
512 branches = opts.get(r'branches')
513 branches = opts.get(r'branches')
513 if tags:
514 if tags:
514 labels = {}
515 labels = {}
515 for l, n in repo.tags().items():
516 for l, n in repo.tags().items():
516 labels.setdefault(cl.rev(n), []).append(l)
517 labels.setdefault(cl.rev(n), []).append(l)
517 def events():
518 def events():
518 b = "default"
519 b = "default"
519 for r in cl:
520 for r in cl:
520 if branches:
521 if branches:
521 newb = cl.read(cl.node(r))[5]['branch']
522 newb = cl.read(cl.node(r))[5]['branch']
522 if newb != b:
523 if newb != b:
523 yield 'a', newb
524 yield 'a', newb
524 b = newb
525 b = newb
525 yield 'n', (r, list(p for p in cl.parentrevs(r)
526 yield 'n', (r, list(p for p in cl.parentrevs(r)
526 if p != -1))
527 if p != -1))
527 if tags:
528 if tags:
528 ls = labels.get(r)
529 ls = labels.get(r)
529 if ls:
530 if ls:
530 for l in ls:
531 for l in ls:
531 yield 'l', (r, l)
532 yield 'l', (r, l)
532 else:
533 else:
533 raise error.Abort(_('need repo for changelog dag'))
534 raise error.Abort(_('need repo for changelog dag'))
534
535
535 for line in dagparser.dagtextlines(events(),
536 for line in dagparser.dagtextlines(events(),
536 addspaces=spaces,
537 addspaces=spaces,
537 wraplabels=True,
538 wraplabels=True,
538 wrapannotations=True,
539 wrapannotations=True,
539 wrapnonlinear=dots,
540 wrapnonlinear=dots,
540 usedots=dots,
541 usedots=dots,
541 maxlinewidth=70):
542 maxlinewidth=70):
542 ui.write(line)
543 ui.write(line)
543 ui.write("\n")
544 ui.write("\n")
544
545
545 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
546 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
546 def debugdata(ui, repo, file_, rev=None, **opts):
547 def debugdata(ui, repo, file_, rev=None, **opts):
547 """dump the contents of a data file revision"""
548 """dump the contents of a data file revision"""
548 opts = pycompat.byteskwargs(opts)
549 opts = pycompat.byteskwargs(opts)
549 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
550 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
550 if rev is not None:
551 if rev is not None:
551 raise error.CommandError('debugdata', _('invalid arguments'))
552 raise error.CommandError('debugdata', _('invalid arguments'))
552 file_, rev = None, file_
553 file_, rev = None, file_
553 elif rev is None:
554 elif rev is None:
554 raise error.CommandError('debugdata', _('invalid arguments'))
555 raise error.CommandError('debugdata', _('invalid arguments'))
555 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
556 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
556 try:
557 try:
557 ui.write(r.revision(r.lookup(rev), raw=True))
558 ui.write(r.revision(r.lookup(rev), raw=True))
558 except KeyError:
559 except KeyError:
559 raise error.Abort(_('invalid revision identifier %s') % rev)
560 raise error.Abort(_('invalid revision identifier %s') % rev)
560
561
561 @command('debugdate',
562 @command('debugdate',
562 [('e', 'extended', None, _('try extended date formats'))],
563 [('e', 'extended', None, _('try extended date formats'))],
563 _('[-e] DATE [RANGE]'),
564 _('[-e] DATE [RANGE]'),
564 norepo=True, optionalrepo=True)
565 norepo=True, optionalrepo=True)
565 def debugdate(ui, date, range=None, **opts):
566 def debugdate(ui, date, range=None, **opts):
566 """parse and display a date"""
567 """parse and display a date"""
567 if opts[r"extended"]:
568 if opts[r"extended"]:
568 d = dateutil.parsedate(date, util.extendeddateformats)
569 d = dateutil.parsedate(date, util.extendeddateformats)
569 else:
570 else:
570 d = dateutil.parsedate(date)
571 d = dateutil.parsedate(date)
571 ui.write(("internal: %d %d\n") % d)
572 ui.write(("internal: %d %d\n") % d)
572 ui.write(("standard: %s\n") % dateutil.datestr(d))
573 ui.write(("standard: %s\n") % dateutil.datestr(d))
573 if range:
574 if range:
574 m = dateutil.matchdate(range)
575 m = dateutil.matchdate(range)
575 ui.write(("match: %s\n") % m(d[0]))
576 ui.write(("match: %s\n") % m(d[0]))
576
577
577 @command('debugdeltachain',
578 @command('debugdeltachain',
578 cmdutil.debugrevlogopts + cmdutil.formatteropts,
579 cmdutil.debugrevlogopts + cmdutil.formatteropts,
579 _('-c|-m|FILE'),
580 _('-c|-m|FILE'),
580 optionalrepo=True)
581 optionalrepo=True)
581 def debugdeltachain(ui, repo, file_=None, **opts):
582 def debugdeltachain(ui, repo, file_=None, **opts):
582 """dump information about delta chains in a revlog
583 """dump information about delta chains in a revlog
583
584
584 Output can be templatized. Available template keywords are:
585 Output can be templatized. Available template keywords are:
585
586
586 :``rev``: revision number
587 :``rev``: revision number
587 :``chainid``: delta chain identifier (numbered by unique base)
588 :``chainid``: delta chain identifier (numbered by unique base)
588 :``chainlen``: delta chain length to this revision
589 :``chainlen``: delta chain length to this revision
589 :``prevrev``: previous revision in delta chain
590 :``prevrev``: previous revision in delta chain
590 :``deltatype``: role of delta / how it was computed
591 :``deltatype``: role of delta / how it was computed
591 :``compsize``: compressed size of revision
592 :``compsize``: compressed size of revision
592 :``uncompsize``: uncompressed size of revision
593 :``uncompsize``: uncompressed size of revision
593 :``chainsize``: total size of compressed revisions in chain
594 :``chainsize``: total size of compressed revisions in chain
594 :``chainratio``: total chain size divided by uncompressed revision size
595 :``chainratio``: total chain size divided by uncompressed revision size
595 (new delta chains typically start at ratio 2.00)
596 (new delta chains typically start at ratio 2.00)
596 :``lindist``: linear distance from base revision in delta chain to end
597 :``lindist``: linear distance from base revision in delta chain to end
597 of this revision
598 of this revision
598 :``extradist``: total size of revisions not part of this delta chain from
599 :``extradist``: total size of revisions not part of this delta chain from
599 base of delta chain to end of this revision; a measurement
600 base of delta chain to end of this revision; a measurement
600 of how much extra data we need to read/seek across to read
601 of how much extra data we need to read/seek across to read
601 the delta chain for this revision
602 the delta chain for this revision
602 :``extraratio``: extradist divided by chainsize; another representation of
603 :``extraratio``: extradist divided by chainsize; another representation of
603 how much unrelated data is needed to load this delta chain
604 how much unrelated data is needed to load this delta chain
604
605
605 If the repository is configured to use the sparse read, additional keywords
606 If the repository is configured to use the sparse read, additional keywords
606 are available:
607 are available:
607
608
608 :``readsize``: total size of data read from the disk for a revision
609 :``readsize``: total size of data read from the disk for a revision
609 (sum of the sizes of all the blocks)
610 (sum of the sizes of all the blocks)
610 :``largestblock``: size of the largest block of data read from the disk
611 :``largestblock``: size of the largest block of data read from the disk
611 :``readdensity``: density of useful bytes in the data read from the disk
612 :``readdensity``: density of useful bytes in the data read from the disk
612 :``srchunks``: in how many data hunks the whole revision would be read
613 :``srchunks``: in how many data hunks the whole revision would be read
613
614
614 The sparse read can be enabled with experimental.sparse-read = True
615 The sparse read can be enabled with experimental.sparse-read = True
615 """
616 """
616 opts = pycompat.byteskwargs(opts)
617 opts = pycompat.byteskwargs(opts)
617 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
618 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
618 index = r.index
619 index = r.index
619 generaldelta = r.version & revlog.FLAG_GENERALDELTA
620 generaldelta = r.version & revlog.FLAG_GENERALDELTA
620 withsparseread = getattr(r, '_withsparseread', False)
621 withsparseread = getattr(r, '_withsparseread', False)
621
622
622 def revinfo(rev):
623 def revinfo(rev):
623 e = index[rev]
624 e = index[rev]
624 compsize = e[1]
625 compsize = e[1]
625 uncompsize = e[2]
626 uncompsize = e[2]
626 chainsize = 0
627 chainsize = 0
627
628
628 if generaldelta:
629 if generaldelta:
629 if e[3] == e[5]:
630 if e[3] == e[5]:
630 deltatype = 'p1'
631 deltatype = 'p1'
631 elif e[3] == e[6]:
632 elif e[3] == e[6]:
632 deltatype = 'p2'
633 deltatype = 'p2'
633 elif e[3] == rev - 1:
634 elif e[3] == rev - 1:
634 deltatype = 'prev'
635 deltatype = 'prev'
635 elif e[3] == rev:
636 elif e[3] == rev:
636 deltatype = 'base'
637 deltatype = 'base'
637 else:
638 else:
638 deltatype = 'other'
639 deltatype = 'other'
639 else:
640 else:
640 if e[3] == rev:
641 if e[3] == rev:
641 deltatype = 'base'
642 deltatype = 'base'
642 else:
643 else:
643 deltatype = 'prev'
644 deltatype = 'prev'
644
645
645 chain = r._deltachain(rev)[0]
646 chain = r._deltachain(rev)[0]
646 for iterrev in chain:
647 for iterrev in chain:
647 e = index[iterrev]
648 e = index[iterrev]
648 chainsize += e[1]
649 chainsize += e[1]
649
650
650 return compsize, uncompsize, deltatype, chain, chainsize
651 return compsize, uncompsize, deltatype, chain, chainsize
651
652
652 fm = ui.formatter('debugdeltachain', opts)
653 fm = ui.formatter('debugdeltachain', opts)
653
654
654 fm.plain(' rev chain# chainlen prev delta '
655 fm.plain(' rev chain# chainlen prev delta '
655 'size rawsize chainsize ratio lindist extradist '
656 'size rawsize chainsize ratio lindist extradist '
656 'extraratio')
657 'extraratio')
657 if withsparseread:
658 if withsparseread:
658 fm.plain(' readsize largestblk rddensity srchunks')
659 fm.plain(' readsize largestblk rddensity srchunks')
659 fm.plain('\n')
660 fm.plain('\n')
660
661
661 chainbases = {}
662 chainbases = {}
662 for rev in r:
663 for rev in r:
663 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
664 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
664 chainbase = chain[0]
665 chainbase = chain[0]
665 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
666 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
666 start = r.start
667 start = r.start
667 length = r.length
668 length = r.length
668 basestart = start(chainbase)
669 basestart = start(chainbase)
669 revstart = start(rev)
670 revstart = start(rev)
670 lineardist = revstart + comp - basestart
671 lineardist = revstart + comp - basestart
671 extradist = lineardist - chainsize
672 extradist = lineardist - chainsize
672 try:
673 try:
673 prevrev = chain[-2]
674 prevrev = chain[-2]
674 except IndexError:
675 except IndexError:
675 prevrev = -1
676 prevrev = -1
676
677
677 chainratio = float(chainsize) / float(uncomp)
678 chainratio = float(chainsize) / float(uncomp)
678 extraratio = float(extradist) / float(chainsize)
679 extraratio = float(extradist) / float(chainsize)
679
680
680 fm.startitem()
681 fm.startitem()
681 fm.write('rev chainid chainlen prevrev deltatype compsize '
682 fm.write('rev chainid chainlen prevrev deltatype compsize '
682 'uncompsize chainsize chainratio lindist extradist '
683 'uncompsize chainsize chainratio lindist extradist '
683 'extraratio',
684 'extraratio',
684 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
685 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
685 rev, chainid, len(chain), prevrev, deltatype, comp,
686 rev, chainid, len(chain), prevrev, deltatype, comp,
686 uncomp, chainsize, chainratio, lineardist, extradist,
687 uncomp, chainsize, chainratio, lineardist, extradist,
687 extraratio,
688 extraratio,
688 rev=rev, chainid=chainid, chainlen=len(chain),
689 rev=rev, chainid=chainid, chainlen=len(chain),
689 prevrev=prevrev, deltatype=deltatype, compsize=comp,
690 prevrev=prevrev, deltatype=deltatype, compsize=comp,
690 uncompsize=uncomp, chainsize=chainsize,
691 uncompsize=uncomp, chainsize=chainsize,
691 chainratio=chainratio, lindist=lineardist,
692 chainratio=chainratio, lindist=lineardist,
692 extradist=extradist, extraratio=extraratio)
693 extradist=extradist, extraratio=extraratio)
693 if withsparseread:
694 if withsparseread:
694 readsize = 0
695 readsize = 0
695 largestblock = 0
696 largestblock = 0
696 srchunks = 0
697 srchunks = 0
697
698
698 for revschunk in revlog._slicechunk(r, chain):
699 for revschunk in revlog._slicechunk(r, chain):
699 srchunks += 1
700 srchunks += 1
700 blkend = start(revschunk[-1]) + length(revschunk[-1])
701 blkend = start(revschunk[-1]) + length(revschunk[-1])
701 blksize = blkend - start(revschunk[0])
702 blksize = blkend - start(revschunk[0])
702
703
703 readsize += blksize
704 readsize += blksize
704 if largestblock < blksize:
705 if largestblock < blksize:
705 largestblock = blksize
706 largestblock = blksize
706
707
707 readdensity = float(chainsize) / float(readsize)
708 readdensity = float(chainsize) / float(readsize)
708
709
709 fm.write('readsize largestblock readdensity srchunks',
710 fm.write('readsize largestblock readdensity srchunks',
710 ' %10d %10d %9.5f %8d',
711 ' %10d %10d %9.5f %8d',
711 readsize, largestblock, readdensity, srchunks,
712 readsize, largestblock, readdensity, srchunks,
712 readsize=readsize, largestblock=largestblock,
713 readsize=readsize, largestblock=largestblock,
713 readdensity=readdensity, srchunks=srchunks)
714 readdensity=readdensity, srchunks=srchunks)
714
715
715 fm.plain('\n')
716 fm.plain('\n')
716
717
717 fm.end()
718 fm.end()
718
719
719 @command('debugdirstate|debugstate',
720 @command('debugdirstate|debugstate',
720 [('', 'nodates', None, _('do not display the saved mtime')),
721 [('', 'nodates', None, _('do not display the saved mtime')),
721 ('', 'datesort', None, _('sort by saved mtime'))],
722 ('', 'datesort', None, _('sort by saved mtime'))],
722 _('[OPTION]...'))
723 _('[OPTION]...'))
723 def debugstate(ui, repo, **opts):
724 def debugstate(ui, repo, **opts):
724 """show the contents of the current dirstate"""
725 """show the contents of the current dirstate"""
725
726
726 nodates = opts.get(r'nodates')
727 nodates = opts.get(r'nodates')
727 datesort = opts.get(r'datesort')
728 datesort = opts.get(r'datesort')
728
729
729 timestr = ""
730 timestr = ""
730 if datesort:
731 if datesort:
731 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
732 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
732 else:
733 else:
733 keyfunc = None # sort by filename
734 keyfunc = None # sort by filename
734 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
735 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
735 if ent[3] == -1:
736 if ent[3] == -1:
736 timestr = 'unset '
737 timestr = 'unset '
737 elif nodates:
738 elif nodates:
738 timestr = 'set '
739 timestr = 'set '
739 else:
740 else:
740 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
741 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
741 time.localtime(ent[3]))
742 time.localtime(ent[3]))
742 timestr = encoding.strtolocal(timestr)
743 timestr = encoding.strtolocal(timestr)
743 if ent[1] & 0o20000:
744 if ent[1] & 0o20000:
744 mode = 'lnk'
745 mode = 'lnk'
745 else:
746 else:
746 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
747 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
747 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
748 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
748 for f in repo.dirstate.copies():
749 for f in repo.dirstate.copies():
749 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
750 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
750
751
751 @command('debugdiscovery',
752 @command('debugdiscovery',
752 [('', 'old', None, _('use old-style discovery')),
753 [('', 'old', None, _('use old-style discovery')),
753 ('', 'nonheads', None,
754 ('', 'nonheads', None,
754 _('use old-style discovery with non-heads included')),
755 _('use old-style discovery with non-heads included')),
755 ('', 'rev', [], 'restrict discovery to this set of revs'),
756 ('', 'rev', [], 'restrict discovery to this set of revs'),
756 ] + cmdutil.remoteopts,
757 ] + cmdutil.remoteopts,
757 _('[--rev REV] [OTHER]'))
758 _('[--rev REV] [OTHER]'))
758 def debugdiscovery(ui, repo, remoteurl="default", **opts):
759 def debugdiscovery(ui, repo, remoteurl="default", **opts):
759 """runs the changeset discovery protocol in isolation"""
760 """runs the changeset discovery protocol in isolation"""
760 opts = pycompat.byteskwargs(opts)
761 opts = pycompat.byteskwargs(opts)
761 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
762 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
762 remote = hg.peer(repo, opts, remoteurl)
763 remote = hg.peer(repo, opts, remoteurl)
763 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
764 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
764
765
765 # make sure tests are repeatable
766 # make sure tests are repeatable
766 random.seed(12323)
767 random.seed(12323)
767
768
768 def doit(pushedrevs, remoteheads, remote=remote):
769 def doit(pushedrevs, remoteheads, remote=remote):
769 if opts.get('old'):
770 if opts.get('old'):
770 if not util.safehasattr(remote, 'branches'):
771 if not util.safehasattr(remote, 'branches'):
771 # enable in-client legacy support
772 # enable in-client legacy support
772 remote = localrepo.locallegacypeer(remote.local())
773 remote = localrepo.locallegacypeer(remote.local())
773 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
774 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
774 force=True)
775 force=True)
775 common = set(common)
776 common = set(common)
776 if not opts.get('nonheads'):
777 if not opts.get('nonheads'):
777 ui.write(("unpruned common: %s\n") %
778 ui.write(("unpruned common: %s\n") %
778 " ".join(sorted(short(n) for n in common)))
779 " ".join(sorted(short(n) for n in common)))
779 dag = dagutil.revlogdag(repo.changelog)
780 dag = dagutil.revlogdag(repo.changelog)
780 all = dag.ancestorset(dag.internalizeall(common))
781 all = dag.ancestorset(dag.internalizeall(common))
781 common = dag.externalizeall(dag.headsetofconnecteds(all))
782 common = dag.externalizeall(dag.headsetofconnecteds(all))
782 else:
783 else:
783 nodes = None
784 nodes = None
784 if pushedrevs:
785 if pushedrevs:
785 revs = scmutil.revrange(repo, pushedrevs)
786 revs = scmutil.revrange(repo, pushedrevs)
786 nodes = [repo[r].node() for r in revs]
787 nodes = [repo[r].node() for r in revs]
787 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
788 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
788 ancestorsof=nodes)
789 ancestorsof=nodes)
789 common = set(common)
790 common = set(common)
790 rheads = set(hds)
791 rheads = set(hds)
791 lheads = set(repo.heads())
792 lheads = set(repo.heads())
792 ui.write(("common heads: %s\n") %
793 ui.write(("common heads: %s\n") %
793 " ".join(sorted(short(n) for n in common)))
794 " ".join(sorted(short(n) for n in common)))
794 if lheads <= common:
795 if lheads <= common:
795 ui.write(("local is subset\n"))
796 ui.write(("local is subset\n"))
796 elif rheads <= common:
797 elif rheads <= common:
797 ui.write(("remote is subset\n"))
798 ui.write(("remote is subset\n"))
798
799
799 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
800 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
800 localrevs = opts['rev']
801 localrevs = opts['rev']
801 doit(localrevs, remoterevs)
802 doit(localrevs, remoterevs)
802
803
803 _chunksize = 4 << 10
804 _chunksize = 4 << 10
804
805
805 @command('debugdownload',
806 @command('debugdownload',
806 [
807 [
807 ('o', 'output', '', _('path')),
808 ('o', 'output', '', _('path')),
808 ],
809 ],
809 optionalrepo=True)
810 optionalrepo=True)
810 def debugdownload(ui, repo, url, output=None, **opts):
811 def debugdownload(ui, repo, url, output=None, **opts):
811 """download a resource using Mercurial logic and config
812 """download a resource using Mercurial logic and config
812 """
813 """
813 fh = urlmod.open(ui, url, output)
814 fh = urlmod.open(ui, url, output)
814
815
815 dest = ui
816 dest = ui
816 if output:
817 if output:
817 dest = open(output, "wb", _chunksize)
818 dest = open(output, "wb", _chunksize)
818 try:
819 try:
819 data = fh.read(_chunksize)
820 data = fh.read(_chunksize)
820 while data:
821 while data:
821 dest.write(data)
822 dest.write(data)
822 data = fh.read(_chunksize)
823 data = fh.read(_chunksize)
823 finally:
824 finally:
824 if output:
825 if output:
825 dest.close()
826 dest.close()
826
827
827 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
828 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
828 def debugextensions(ui, **opts):
829 def debugextensions(ui, **opts):
829 '''show information about active extensions'''
830 '''show information about active extensions'''
830 opts = pycompat.byteskwargs(opts)
831 opts = pycompat.byteskwargs(opts)
831 exts = extensions.extensions(ui)
832 exts = extensions.extensions(ui)
832 hgver = util.version()
833 hgver = util.version()
833 fm = ui.formatter('debugextensions', opts)
834 fm = ui.formatter('debugextensions', opts)
834 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
835 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
835 isinternal = extensions.ismoduleinternal(extmod)
836 isinternal = extensions.ismoduleinternal(extmod)
836 extsource = pycompat.fsencode(extmod.__file__)
837 extsource = pycompat.fsencode(extmod.__file__)
837 if isinternal:
838 if isinternal:
838 exttestedwith = [] # never expose magic string to users
839 exttestedwith = [] # never expose magic string to users
839 else:
840 else:
840 exttestedwith = getattr(extmod, 'testedwith', '').split()
841 exttestedwith = getattr(extmod, 'testedwith', '').split()
841 extbuglink = getattr(extmod, 'buglink', None)
842 extbuglink = getattr(extmod, 'buglink', None)
842
843
843 fm.startitem()
844 fm.startitem()
844
845
845 if ui.quiet or ui.verbose:
846 if ui.quiet or ui.verbose:
846 fm.write('name', '%s\n', extname)
847 fm.write('name', '%s\n', extname)
847 else:
848 else:
848 fm.write('name', '%s', extname)
849 fm.write('name', '%s', extname)
849 if isinternal or hgver in exttestedwith:
850 if isinternal or hgver in exttestedwith:
850 fm.plain('\n')
851 fm.plain('\n')
851 elif not exttestedwith:
852 elif not exttestedwith:
852 fm.plain(_(' (untested!)\n'))
853 fm.plain(_(' (untested!)\n'))
853 else:
854 else:
854 lasttestedversion = exttestedwith[-1]
855 lasttestedversion = exttestedwith[-1]
855 fm.plain(' (%s!)\n' % lasttestedversion)
856 fm.plain(' (%s!)\n' % lasttestedversion)
856
857
857 fm.condwrite(ui.verbose and extsource, 'source',
858 fm.condwrite(ui.verbose and extsource, 'source',
858 _(' location: %s\n'), extsource or "")
859 _(' location: %s\n'), extsource or "")
859
860
860 if ui.verbose:
861 if ui.verbose:
861 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
862 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
862 fm.data(bundled=isinternal)
863 fm.data(bundled=isinternal)
863
864
864 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
865 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
865 _(' tested with: %s\n'),
866 _(' tested with: %s\n'),
866 fm.formatlist(exttestedwith, name='ver'))
867 fm.formatlist(exttestedwith, name='ver'))
867
868
868 fm.condwrite(ui.verbose and extbuglink, 'buglink',
869 fm.condwrite(ui.verbose and extbuglink, 'buglink',
869 _(' bug reporting: %s\n'), extbuglink or "")
870 _(' bug reporting: %s\n'), extbuglink or "")
870
871
871 fm.end()
872 fm.end()
872
873
873 @command('debugfileset',
874 @command('debugfileset',
874 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
875 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
875 _('[-r REV] FILESPEC'))
876 _('[-r REV] FILESPEC'))
876 def debugfileset(ui, repo, expr, **opts):
877 def debugfileset(ui, repo, expr, **opts):
877 '''parse and apply a fileset specification'''
878 '''parse and apply a fileset specification'''
878 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
879 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
879 if ui.verbose:
880 if ui.verbose:
880 tree = fileset.parse(expr)
881 tree = fileset.parse(expr)
881 ui.note(fileset.prettyformat(tree), "\n")
882 ui.note(fileset.prettyformat(tree), "\n")
882
883
883 for f in ctx.getfileset(expr):
884 for f in ctx.getfileset(expr):
884 ui.write("%s\n" % f)
885 ui.write("%s\n" % f)
885
886
886 @command('debugformat',
887 @command('debugformat',
887 [] + cmdutil.formatteropts,
888 [] + cmdutil.formatteropts,
888 _(''))
889 _(''))
889 def debugformat(ui, repo, **opts):
890 def debugformat(ui, repo, **opts):
890 """display format information about the current repository
891 """display format information about the current repository
891
892
892 Use --verbose to get extra information about current config value and
893 Use --verbose to get extra information about current config value and
893 Mercurial default."""
894 Mercurial default."""
894 opts = pycompat.byteskwargs(opts)
895 opts = pycompat.byteskwargs(opts)
895 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
896 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
896 maxvariantlength = max(len('format-variant'), maxvariantlength)
897 maxvariantlength = max(len('format-variant'), maxvariantlength)
897
898
898 def makeformatname(name):
899 def makeformatname(name):
899 return '%s:' + (' ' * (maxvariantlength - len(name)))
900 return '%s:' + (' ' * (maxvariantlength - len(name)))
900
901
901 fm = ui.formatter('debugformat', opts)
902 fm = ui.formatter('debugformat', opts)
902 if fm.isplain():
903 if fm.isplain():
903 def formatvalue(value):
904 def formatvalue(value):
904 if util.safehasattr(value, 'startswith'):
905 if util.safehasattr(value, 'startswith'):
905 return value
906 return value
906 if value:
907 if value:
907 return 'yes'
908 return 'yes'
908 else:
909 else:
909 return 'no'
910 return 'no'
910 else:
911 else:
911 formatvalue = pycompat.identity
912 formatvalue = pycompat.identity
912
913
913 fm.plain('format-variant')
914 fm.plain('format-variant')
914 fm.plain(' ' * (maxvariantlength - len('format-variant')))
915 fm.plain(' ' * (maxvariantlength - len('format-variant')))
915 fm.plain(' repo')
916 fm.plain(' repo')
916 if ui.verbose:
917 if ui.verbose:
917 fm.plain(' config default')
918 fm.plain(' config default')
918 fm.plain('\n')
919 fm.plain('\n')
919 for fv in upgrade.allformatvariant:
920 for fv in upgrade.allformatvariant:
920 fm.startitem()
921 fm.startitem()
921 repovalue = fv.fromrepo(repo)
922 repovalue = fv.fromrepo(repo)
922 configvalue = fv.fromconfig(repo)
923 configvalue = fv.fromconfig(repo)
923
924
924 if repovalue != configvalue:
925 if repovalue != configvalue:
925 namelabel = 'formatvariant.name.mismatchconfig'
926 namelabel = 'formatvariant.name.mismatchconfig'
926 repolabel = 'formatvariant.repo.mismatchconfig'
927 repolabel = 'formatvariant.repo.mismatchconfig'
927 elif repovalue != fv.default:
928 elif repovalue != fv.default:
928 namelabel = 'formatvariant.name.mismatchdefault'
929 namelabel = 'formatvariant.name.mismatchdefault'
929 repolabel = 'formatvariant.repo.mismatchdefault'
930 repolabel = 'formatvariant.repo.mismatchdefault'
930 else:
931 else:
931 namelabel = 'formatvariant.name.uptodate'
932 namelabel = 'formatvariant.name.uptodate'
932 repolabel = 'formatvariant.repo.uptodate'
933 repolabel = 'formatvariant.repo.uptodate'
933
934
934 fm.write('name', makeformatname(fv.name), fv.name,
935 fm.write('name', makeformatname(fv.name), fv.name,
935 label=namelabel)
936 label=namelabel)
936 fm.write('repo', ' %3s', formatvalue(repovalue),
937 fm.write('repo', ' %3s', formatvalue(repovalue),
937 label=repolabel)
938 label=repolabel)
938 if fv.default != configvalue:
939 if fv.default != configvalue:
939 configlabel = 'formatvariant.config.special'
940 configlabel = 'formatvariant.config.special'
940 else:
941 else:
941 configlabel = 'formatvariant.config.default'
942 configlabel = 'formatvariant.config.default'
942 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
943 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
943 label=configlabel)
944 label=configlabel)
944 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
945 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
945 label='formatvariant.default')
946 label='formatvariant.default')
946 fm.plain('\n')
947 fm.plain('\n')
947 fm.end()
948 fm.end()
948
949
949 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
950 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
950 def debugfsinfo(ui, path="."):
951 def debugfsinfo(ui, path="."):
951 """show information detected about current filesystem"""
952 """show information detected about current filesystem"""
952 ui.write(('path: %s\n') % path)
953 ui.write(('path: %s\n') % path)
953 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
954 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
954 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
955 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
955 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
956 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
956 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
957 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
957 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
958 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
958 casesensitive = '(unknown)'
959 casesensitive = '(unknown)'
959 try:
960 try:
960 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
961 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
961 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
962 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
962 except OSError:
963 except OSError:
963 pass
964 pass
964 ui.write(('case-sensitive: %s\n') % casesensitive)
965 ui.write(('case-sensitive: %s\n') % casesensitive)
965
966
966 @command('debuggetbundle',
967 @command('debuggetbundle',
967 [('H', 'head', [], _('id of head node'), _('ID')),
968 [('H', 'head', [], _('id of head node'), _('ID')),
968 ('C', 'common', [], _('id of common node'), _('ID')),
969 ('C', 'common', [], _('id of common node'), _('ID')),
969 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
970 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
970 _('REPO FILE [-H|-C ID]...'),
971 _('REPO FILE [-H|-C ID]...'),
971 norepo=True)
972 norepo=True)
972 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
973 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
973 """retrieves a bundle from a repo
974 """retrieves a bundle from a repo
974
975
975 Every ID must be a full-length hex node id string. Saves the bundle to the
976 Every ID must be a full-length hex node id string. Saves the bundle to the
976 given file.
977 given file.
977 """
978 """
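# Illustrative usage (hypothetical URL and node id), derived from the options
# above: fetch everything reachable from one head into a local bundle file,
#   hg debuggetbundle http://example.com/repo bundle.hg -H <40-hex-node> -t bzip2
# where the -t value is mapped to an on-disk bundle format below (bzip2 -> HG10BZ).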
978 opts = pycompat.byteskwargs(opts)
979 opts = pycompat.byteskwargs(opts)
979 repo = hg.peer(ui, opts, repopath)
980 repo = hg.peer(ui, opts, repopath)
980 if not repo.capable('getbundle'):
981 if not repo.capable('getbundle'):
981 raise error.Abort("getbundle() not supported by target repository")
982 raise error.Abort("getbundle() not supported by target repository")
982 args = {}
983 args = {}
983 if common:
984 if common:
984 args[r'common'] = [bin(s) for s in common]
985 args[r'common'] = [bin(s) for s in common]
985 if head:
986 if head:
986 args[r'heads'] = [bin(s) for s in head]
987 args[r'heads'] = [bin(s) for s in head]
987 # TODO: get desired bundlecaps from command line.
988 # TODO: get desired bundlecaps from command line.
988 args[r'bundlecaps'] = None
989 args[r'bundlecaps'] = None
989 bundle = repo.getbundle('debug', **args)
990 bundle = repo.getbundle('debug', **args)
990
991
991 bundletype = opts.get('type', 'bzip2').lower()
992 bundletype = opts.get('type', 'bzip2').lower()
992 btypes = {'none': 'HG10UN',
993 btypes = {'none': 'HG10UN',
993 'bzip2': 'HG10BZ',
994 'bzip2': 'HG10BZ',
994 'gzip': 'HG10GZ',
995 'gzip': 'HG10GZ',
995 'bundle2': 'HG20'}
996 'bundle2': 'HG20'}
996 bundletype = btypes.get(bundletype)
997 bundletype = btypes.get(bundletype)
997 if bundletype not in bundle2.bundletypes:
998 if bundletype not in bundle2.bundletypes:
998 raise error.Abort(_('unknown bundle type specified with --type'))
999 raise error.Abort(_('unknown bundle type specified with --type'))
999 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1000 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1000
1001
1001 @command('debugignore', [], '[FILE]')
1002 @command('debugignore', [], '[FILE]')
1002 def debugignore(ui, repo, *files, **opts):
1003 def debugignore(ui, repo, *files, **opts):
1003 """display the combined ignore pattern and information about ignored files
1004 """display the combined ignore pattern and information about ignored files
1004
1005
1005 With no argument display the combined ignore pattern.
1006 With no argument display the combined ignore pattern.
1006
1007
1007 Given space-separated file names, show whether each file is ignored
1008 Given space-separated file names, show whether each file is ignored
1008 and, if so, the ignore rule (file and line number) that matched it.
1009 and, if so, the ignore rule (file and line number) that matched it.
1009 """
1010 """
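# Illustrative usage (hypothetical file name): `hg debugignore` prints the
# combined ignore matcher, while `hg debugignore build/output.o` reports
# whether that path is ignored and, if so, which ignore rule matched it.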
1010 ignore = repo.dirstate._ignore
1011 ignore = repo.dirstate._ignore
1011 if not files:
1012 if not files:
1012 # Show all the patterns
1013 # Show all the patterns
1013 ui.write("%s\n" % pycompat.byterepr(ignore))
1014 ui.write("%s\n" % pycompat.byterepr(ignore))
1014 else:
1015 else:
1015 m = scmutil.match(repo[None], pats=files)
1016 m = scmutil.match(repo[None], pats=files)
1016 for f in m.files():
1017 for f in m.files():
1017 nf = util.normpath(f)
1018 nf = util.normpath(f)
1018 ignored = None
1019 ignored = None
1019 ignoredata = None
1020 ignoredata = None
1020 if nf != '.':
1021 if nf != '.':
1021 if ignore(nf):
1022 if ignore(nf):
1022 ignored = nf
1023 ignored = nf
1023 ignoredata = repo.dirstate._ignorefileandline(nf)
1024 ignoredata = repo.dirstate._ignorefileandline(nf)
1024 else:
1025 else:
1025 for p in util.finddirs(nf):
1026 for p in util.finddirs(nf):
1026 if ignore(p):
1027 if ignore(p):
1027 ignored = p
1028 ignored = p
1028 ignoredata = repo.dirstate._ignorefileandline(p)
1029 ignoredata = repo.dirstate._ignorefileandline(p)
1029 break
1030 break
1030 if ignored:
1031 if ignored:
1031 if ignored == nf:
1032 if ignored == nf:
1032 ui.write(_("%s is ignored\n") % m.uipath(f))
1033 ui.write(_("%s is ignored\n") % m.uipath(f))
1033 else:
1034 else:
1034 ui.write(_("%s is ignored because of "
1035 ui.write(_("%s is ignored because of "
1035 "containing folder %s\n")
1036 "containing folder %s\n")
1036 % (m.uipath(f), ignored))
1037 % (m.uipath(f), ignored))
1037 ignorefile, lineno, line = ignoredata
1038 ignorefile, lineno, line = ignoredata
1038 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1039 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1039 % (ignorefile, lineno, line))
1040 % (ignorefile, lineno, line))
1040 else:
1041 else:
1041 ui.write(_("%s is not ignored\n") % m.uipath(f))
1042 ui.write(_("%s is not ignored\n") % m.uipath(f))
1042
1043
1043 @command('debugindex', cmdutil.debugrevlogopts +
1044 @command('debugindex', cmdutil.debugrevlogopts +
1044 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1045 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1045 _('[-f FORMAT] -c|-m|FILE'),
1046 _('[-f FORMAT] -c|-m|FILE'),
1046 optionalrepo=True)
1047 optionalrepo=True)
1047 def debugindex(ui, repo, file_=None, **opts):
1048 def debugindex(ui, repo, file_=None, **opts):
1048 """dump the contents of an index file"""
1049 """dump the contents of an index file"""
1049 opts = pycompat.byteskwargs(opts)
1050 opts = pycompat.byteskwargs(opts)
1050 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1051 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1051 format = opts.get('format', 0)
1052 format = opts.get('format', 0)
1052 if format not in (0, 1):
1053 if format not in (0, 1):
1053 raise error.Abort(_("unknown format %d") % format)
1054 raise error.Abort(_("unknown format %d") % format)
1054
1055
1055 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1056 generaldelta = r.version & revlog.FLAG_GENERALDELTA
1056 if generaldelta:
1057 if generaldelta:
1057 basehdr = ' delta'
1058 basehdr = ' delta'
1058 else:
1059 else:
1059 basehdr = ' base'
1060 basehdr = ' base'
1060
1061
1061 if ui.debugflag:
1062 if ui.debugflag:
1062 shortfn = hex
1063 shortfn = hex
1063 else:
1064 else:
1064 shortfn = short
1065 shortfn = short
1065
1066
1066 # There might not be anything in r, so have a sane default
1067 # There might not be anything in r, so have a sane default
1067 idlen = 12
1068 idlen = 12
1068 for i in r:
1069 for i in r:
1069 idlen = len(shortfn(r.node(i)))
1070 idlen = len(shortfn(r.node(i)))
1070 break
1071 break
1071
1072
1072 if format == 0:
1073 if format == 0:
1073 ui.write((" rev offset length " + basehdr + " linkrev"
1074 ui.write((" rev offset length " + basehdr + " linkrev"
1074 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1075 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
1075 elif format == 1:
1076 elif format == 1:
1076 ui.write((" rev flag offset length"
1077 ui.write((" rev flag offset length"
1077 " size " + basehdr + " link p1 p2"
1078 " size " + basehdr + " link p1 p2"
1078 " %s\n") % "nodeid".rjust(idlen))
1079 " %s\n") % "nodeid".rjust(idlen))
1079
1080
1080 for i in r:
1081 for i in r:
1081 node = r.node(i)
1082 node = r.node(i)
1082 if generaldelta:
1083 if generaldelta:
1083 base = r.deltaparent(i)
1084 base = r.deltaparent(i)
1084 else:
1085 else:
1085 base = r.chainbase(i)
1086 base = r.chainbase(i)
1086 if format == 0:
1087 if format == 0:
1087 try:
1088 try:
1088 pp = r.parents(node)
1089 pp = r.parents(node)
1089 except Exception:
1090 except Exception:
1090 pp = [nullid, nullid]
1091 pp = [nullid, nullid]
1091 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1092 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1092 i, r.start(i), r.length(i), base, r.linkrev(i),
1093 i, r.start(i), r.length(i), base, r.linkrev(i),
1093 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1094 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1094 elif format == 1:
1095 elif format == 1:
1095 pr = r.parentrevs(i)
1096 pr = r.parentrevs(i)
1096 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1097 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1097 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1098 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1098 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1099 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1099
1100
1100 @command('debugindexdot', cmdutil.debugrevlogopts,
1101 @command('debugindexdot', cmdutil.debugrevlogopts,
1101 _('-c|-m|FILE'), optionalrepo=True)
1102 _('-c|-m|FILE'), optionalrepo=True)
1102 def debugindexdot(ui, repo, file_=None, **opts):
1103 def debugindexdot(ui, repo, file_=None, **opts):
1103 """dump an index DAG as a graphviz dot file"""
1104 """dump an index DAG as a graphviz dot file"""
1104 opts = pycompat.byteskwargs(opts)
1105 opts = pycompat.byteskwargs(opts)
1105 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1106 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1106 ui.write(("digraph G {\n"))
1107 ui.write(("digraph G {\n"))
1107 for i in r:
1108 for i in r:
1108 node = r.node(i)
1109 node = r.node(i)
1109 pp = r.parents(node)
1110 pp = r.parents(node)
1110 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1111 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1111 if pp[1] != nullid:
1112 if pp[1] != nullid:
1112 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1113 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1113 ui.write("}\n")
1114 ui.write("}\n")
1114
1115
1115 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1116 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1116 def debuginstall(ui, **opts):
1117 def debuginstall(ui, **opts):
1117 '''test Mercurial installation
1118 '''test Mercurial installation
1118
1119
1119 Returns 0 on success.
1120 Returns 0 on success.
1120 '''
1121 '''
1121 opts = pycompat.byteskwargs(opts)
1122 opts = pycompat.byteskwargs(opts)
1122
1123
1123 def writetemp(contents):
1124 def writetemp(contents):
1124 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1125 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1125 f = os.fdopen(fd, r"wb")
1126 f = os.fdopen(fd, r"wb")
1126 f.write(contents)
1127 f.write(contents)
1127 f.close()
1128 f.close()
1128 return name
1129 return name
1129
1130
1130 problems = 0
1131 problems = 0
1131
1132
1132 fm = ui.formatter('debuginstall', opts)
1133 fm = ui.formatter('debuginstall', opts)
1133 fm.startitem()
1134 fm.startitem()
1134
1135
1135 # encoding
1136 # encoding
1136 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1137 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1137 err = None
1138 err = None
1138 try:
1139 try:
1139 codecs.lookup(pycompat.sysstr(encoding.encoding))
1140 codecs.lookup(pycompat.sysstr(encoding.encoding))
1140 except LookupError as inst:
1141 except LookupError as inst:
1141 err = util.forcebytestr(inst)
1142 err = util.forcebytestr(inst)
1142 problems += 1
1143 problems += 1
1143 fm.condwrite(err, 'encodingerror', _(" %s\n"
1144 fm.condwrite(err, 'encodingerror', _(" %s\n"
1144 " (check that your locale is properly set)\n"), err)
1145 " (check that your locale is properly set)\n"), err)
1145
1146
1146 # Python
1147 # Python
1147 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1148 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1148 pycompat.sysexecutable)
1149 pycompat.sysexecutable)
1149 fm.write('pythonver', _("checking Python version (%s)\n"),
1150 fm.write('pythonver', _("checking Python version (%s)\n"),
1150 ("%d.%d.%d" % sys.version_info[:3]))
1151 ("%d.%d.%d" % sys.version_info[:3]))
1151 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1152 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1152 os.path.dirname(pycompat.fsencode(os.__file__)))
1153 os.path.dirname(pycompat.fsencode(os.__file__)))
1153
1154
1154 security = set(sslutil.supportedprotocols)
1155 security = set(sslutil.supportedprotocols)
1155 if sslutil.hassni:
1156 if sslutil.hassni:
1156 security.add('sni')
1157 security.add('sni')
1157
1158
1158 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1159 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1159 fm.formatlist(sorted(security), name='protocol',
1160 fm.formatlist(sorted(security), name='protocol',
1160 fmt='%s', sep=','))
1161 fmt='%s', sep=','))
1161
1162
1162 # These are warnings, not errors. So don't increment problem count. This
1163 # These are warnings, not errors. So don't increment problem count. This
1163 # may change in the future.
1164 # may change in the future.
1164 if 'tls1.2' not in security:
1165 if 'tls1.2' not in security:
1165 fm.plain(_(' TLS 1.2 not supported by Python install; '
1166 fm.plain(_(' TLS 1.2 not supported by Python install; '
1166 'network connections lack modern security\n'))
1167 'network connections lack modern security\n'))
1167 if 'sni' not in security:
1168 if 'sni' not in security:
1168 fm.plain(_(' SNI not supported by Python install; may have '
1169 fm.plain(_(' SNI not supported by Python install; may have '
1169 'connectivity issues with some servers\n'))
1170 'connectivity issues with some servers\n'))
1170
1171
1171 # TODO print CA cert info
1172 # TODO print CA cert info
1172
1173
1173 # hg version
1174 # hg version
1174 hgver = util.version()
1175 hgver = util.version()
1175 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1176 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1176 hgver.split('+')[0])
1177 hgver.split('+')[0])
1177 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1178 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1178 '+'.join(hgver.split('+')[1:]))
1179 '+'.join(hgver.split('+')[1:]))
1179
1180
1180 # compiled modules
1181 # compiled modules
1181 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1182 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1182 policy.policy)
1183 policy.policy)
1183 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1184 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1184 os.path.dirname(pycompat.fsencode(__file__)))
1185 os.path.dirname(pycompat.fsencode(__file__)))
1185
1186
1186 if policy.policy in ('c', 'allow'):
1187 if policy.policy in ('c', 'allow'):
1187 err = None
1188 err = None
1188 try:
1189 try:
1189 from .cext import (
1190 from .cext import (
1190 base85,
1191 base85,
1191 bdiff,
1192 bdiff,
1192 mpatch,
1193 mpatch,
1193 osutil,
1194 osutil,
1194 )
1195 )
1195 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1196 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1196 except Exception as inst:
1197 except Exception as inst:
1197 err = util.forcebytestr(inst)
1198 err = util.forcebytestr(inst)
1198 problems += 1
1199 problems += 1
1199 fm.condwrite(err, 'extensionserror', " %s\n", err)
1200 fm.condwrite(err, 'extensionserror', " %s\n", err)
1200
1201
1201 compengines = util.compengines._engines.values()
1202 compengines = util.compengines._engines.values()
1202 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1203 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1203 fm.formatlist(sorted(e.name() for e in compengines),
1204 fm.formatlist(sorted(e.name() for e in compengines),
1204 name='compengine', fmt='%s', sep=', '))
1205 name='compengine', fmt='%s', sep=', '))
1205 fm.write('compenginesavail', _('checking available compression engines '
1206 fm.write('compenginesavail', _('checking available compression engines '
1206 '(%s)\n'),
1207 '(%s)\n'),
1207 fm.formatlist(sorted(e.name() for e in compengines
1208 fm.formatlist(sorted(e.name() for e in compengines
1208 if e.available()),
1209 if e.available()),
1209 name='compengine', fmt='%s', sep=', '))
1210 name='compengine', fmt='%s', sep=', '))
1210 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1211 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1211 fm.write('compenginesserver', _('checking available compression engines '
1212 fm.write('compenginesserver', _('checking available compression engines '
1212 'for wire protocol (%s)\n'),
1213 'for wire protocol (%s)\n'),
1213 fm.formatlist([e.name() for e in wirecompengines
1214 fm.formatlist([e.name() for e in wirecompengines
1214 if e.wireprotosupport()],
1215 if e.wireprotosupport()],
1215 name='compengine', fmt='%s', sep=', '))
1216 name='compengine', fmt='%s', sep=', '))
1216 re2 = 'missing'
1217 re2 = 'missing'
1217 if util._re2:
1218 if util._re2:
1218 re2 = 'available'
1219 re2 = 'available'
1219 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1220 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1220 fm.data(re2=bool(util._re2))
1221 fm.data(re2=bool(util._re2))
1221
1222
1222 # templates
1223 # templates
1223 p = templater.templatepaths()
1224 p = templater.templatepaths()
1224 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1225 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1225 fm.condwrite(not p, '', _(" no template directories found\n"))
1226 fm.condwrite(not p, '', _(" no template directories found\n"))
1226 if p:
1227 if p:
1227 m = templater.templatepath("map-cmdline.default")
1228 m = templater.templatepath("map-cmdline.default")
1228 if m:
1229 if m:
1229 # template found, check if it is working
1230 # template found, check if it is working
1230 err = None
1231 err = None
1231 try:
1232 try:
1232 templater.templater.frommapfile(m)
1233 templater.templater.frommapfile(m)
1233 except Exception as inst:
1234 except Exception as inst:
1234 err = util.forcebytestr(inst)
1235 err = util.forcebytestr(inst)
1235 p = None
1236 p = None
1236 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1237 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1237 else:
1238 else:
1238 p = None
1239 p = None
1239 fm.condwrite(p, 'defaulttemplate',
1240 fm.condwrite(p, 'defaulttemplate',
1240 _("checking default template (%s)\n"), m)
1241 _("checking default template (%s)\n"), m)
1241 fm.condwrite(not m, 'defaulttemplatenotfound',
1242 fm.condwrite(not m, 'defaulttemplatenotfound',
1242 _(" template '%s' not found\n"), "default")
1243 _(" template '%s' not found\n"), "default")
1243 if not p:
1244 if not p:
1244 problems += 1
1245 problems += 1
1245 fm.condwrite(not p, '',
1246 fm.condwrite(not p, '',
1246 _(" (templates seem to have been installed incorrectly)\n"))
1247 _(" (templates seem to have been installed incorrectly)\n"))
1247
1248
1248 # editor
1249 # editor
1249 editor = ui.geteditor()
1250 editor = ui.geteditor()
1250 editor = util.expandpath(editor)
1251 editor = util.expandpath(editor)
1251 editorbin = util.shellsplit(editor)[0]
1252 editorbin = util.shellsplit(editor)[0]
1252 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1253 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1253 cmdpath = util.findexe(editorbin)
1254 cmdpath = util.findexe(editorbin)
1254 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1255 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1255 _(" No commit editor set and can't find %s in PATH\n"
1256 _(" No commit editor set and can't find %s in PATH\n"
1256 " (specify a commit editor in your configuration"
1257 " (specify a commit editor in your configuration"
1257 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1258 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1258 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1259 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1259 _(" Can't find editor '%s' in PATH\n"
1260 _(" Can't find editor '%s' in PATH\n"
1260 " (specify a commit editor in your configuration"
1261 " (specify a commit editor in your configuration"
1261 " file)\n"), not cmdpath and editorbin)
1262 " file)\n"), not cmdpath and editorbin)
1262 if not cmdpath and editor != 'vi':
1263 if not cmdpath and editor != 'vi':
1263 problems += 1
1264 problems += 1
1264
1265
1265 # check username
1266 # check username
1266 username = None
1267 username = None
1267 err = None
1268 err = None
1268 try:
1269 try:
1269 username = ui.username()
1270 username = ui.username()
1270 except error.Abort as e:
1271 except error.Abort as e:
1271 err = util.forcebytestr(e)
1272 err = util.forcebytestr(e)
1272 problems += 1
1273 problems += 1
1273
1274
1274 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1275 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1275 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1276 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1276 " (specify a username in your configuration file)\n"), err)
1277 " (specify a username in your configuration file)\n"), err)
1277
1278
1278 fm.condwrite(not problems, '',
1279 fm.condwrite(not problems, '',
1279 _("no problems detected\n"))
1280 _("no problems detected\n"))
1280 if not problems:
1281 if not problems:
1281 fm.data(problems=problems)
1282 fm.data(problems=problems)
1282 fm.condwrite(problems, 'problems',
1283 fm.condwrite(problems, 'problems',
1283 _("%d problems detected,"
1284 _("%d problems detected,"
1284 " please check your install!\n"), problems)
1285 " please check your install!\n"), problems)
1285 fm.end()
1286 fm.end()
1286
1287
1287 return problems
1288 return problems
1288
1289
1289 @command('debugknown', [], _('REPO ID...'), norepo=True)
1290 @command('debugknown', [], _('REPO ID...'), norepo=True)
1290 def debugknown(ui, repopath, *ids, **opts):
1291 def debugknown(ui, repopath, *ids, **opts):
1291 """test whether node ids are known to a repo
1292 """test whether node ids are known to a repo
1292
1293
1293 Every ID must be a full-length hex node id string. Returns a list of 0s
1294 Every ID must be a full-length hex node id string. Returns a list of 0s
1294 and 1s indicating unknown/known.
1295 and 1s indicating unknown/known.
1295 """
1296 """
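# Illustrative usage (hypothetical URL and node ids): querying a remote about
# two changesets,
#   hg debugknown http://example.com/repo <node1-hex> <node2-hex>
# might print "10", meaning the first node is known and the second is not.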
1296 opts = pycompat.byteskwargs(opts)
1297 opts = pycompat.byteskwargs(opts)
1297 repo = hg.peer(ui, opts, repopath)
1298 repo = hg.peer(ui, opts, repopath)
1298 if not repo.capable('known'):
1299 if not repo.capable('known'):
1299 raise error.Abort("known() not supported by target repository")
1300 raise error.Abort("known() not supported by target repository")
1300 flags = repo.known([bin(s) for s in ids])
1301 flags = repo.known([bin(s) for s in ids])
1301 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1302 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1302
1303
1303 @command('debuglabelcomplete', [], _('LABEL...'))
1304 @command('debuglabelcomplete', [], _('LABEL...'))
1304 def debuglabelcomplete(ui, repo, *args):
1305 def debuglabelcomplete(ui, repo, *args):
1305 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1306 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1306 debugnamecomplete(ui, repo, *args)
1307 debugnamecomplete(ui, repo, *args)
1307
1308
1308 @command('debuglocks',
1309 @command('debuglocks',
1309 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1310 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1310 ('W', 'force-wlock', None,
1311 ('W', 'force-wlock', None,
1311 _('free the working state lock (DANGEROUS)')),
1312 _('free the working state lock (DANGEROUS)')),
1312 ('s', 'set-lock', None, _('set the store lock until stopped')),
1313 ('s', 'set-lock', None, _('set the store lock until stopped')),
1313 ('S', 'set-wlock', None,
1314 ('S', 'set-wlock', None,
1314 _('set the working state lock until stopped'))],
1315 _('set the working state lock until stopped'))],
1315 _('[OPTION]...'))
1316 _('[OPTION]...'))
1316 def debuglocks(ui, repo, **opts):
1317 def debuglocks(ui, repo, **opts):
1317 """show or modify state of locks
1318 """show or modify state of locks
1318
1319
1319 By default, this command will show which locks are held. This
1320 By default, this command will show which locks are held. This
1320 includes the user and process holding the lock, the amount of time
1321 includes the user and process holding the lock, the amount of time
1321 the lock has been held, and the machine name where the process is
1322 the lock has been held, and the machine name where the process is
1322 running if it's not local.
1323 running if it's not local.
1323
1324
1324 Locks protect the integrity of Mercurial's data, so they should be
1325 Locks protect the integrity of Mercurial's data, so they should be
1325 treated with care. System crashes or other interruptions may cause
1326 treated with care. System crashes or other interruptions may cause
1326 locks to not be properly released, though Mercurial will usually
1327 locks to not be properly released, though Mercurial will usually
1327 detect and remove such stale locks automatically.
1328 detect and remove such stale locks automatically.
1328
1329
1329 However, detecting stale locks may not always be possible (for
1330 However, detecting stale locks may not always be possible (for
1330 instance, on a shared filesystem). Removing locks may also be
1331 instance, on a shared filesystem). Removing locks may also be
1331 blocked by filesystem permissions.
1332 blocked by filesystem permissions.
1332
1333
1333 Setting a lock will prevent other commands from changing the data.
1334 Setting a lock will prevent other commands from changing the data.
1334 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1335 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1335 The set locks are removed when the command exits.
1336 The set locks are removed when the command exits.
1336
1337
1337 Returns 0 if no locks are held.
1338 Returns 0 if no locks are held.
1338
1339
1339 """
1340 """
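# Illustrative output (hypothetical user and pid): with no options this might
# print something like
#   lock:  user alice, process 1234 (12s)
#   wlock: free
# and the return value counts the locks currently held.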
1340
1341
1341 if opts.get(r'force_lock'):
1342 if opts.get(r'force_lock'):
1342 repo.svfs.unlink('lock')
1343 repo.svfs.unlink('lock')
1343 if opts.get(r'force_wlock'):
1344 if opts.get(r'force_wlock'):
1344 repo.vfs.unlink('wlock')
1345 repo.vfs.unlink('wlock')
1345 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1346 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1346 return 0
1347 return 0
1347
1348
1348 locks = []
1349 locks = []
1349 try:
1350 try:
1350 if opts.get(r'set_wlock'):
1351 if opts.get(r'set_wlock'):
1351 try:
1352 try:
1352 locks.append(repo.wlock(False))
1353 locks.append(repo.wlock(False))
1353 except error.LockHeld:
1354 except error.LockHeld:
1354 raise error.Abort(_('wlock is already held'))
1355 raise error.Abort(_('wlock is already held'))
1355 if opts.get(r'set_lock'):
1356 if opts.get(r'set_lock'):
1356 try:
1357 try:
1357 locks.append(repo.lock(False))
1358 locks.append(repo.lock(False))
1358 except error.LockHeld:
1359 except error.LockHeld:
1359 raise error.Abort(_('lock is already held'))
1360 raise error.Abort(_('lock is already held'))
1360 if len(locks):
1361 if len(locks):
1361 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1362 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1362 return 0
1363 return 0
1363 finally:
1364 finally:
1364 release(*locks)
1365 release(*locks)
1365
1366
1366 now = time.time()
1367 now = time.time()
1367 held = 0
1368 held = 0
1368
1369
1369 def report(vfs, name, method):
1370 def report(vfs, name, method):
1370 # this causes stale locks to get reaped for more accurate reporting
1371 # this causes stale locks to get reaped for more accurate reporting
1371 try:
1372 try:
1372 l = method(False)
1373 l = method(False)
1373 except error.LockHeld:
1374 except error.LockHeld:
1374 l = None
1375 l = None
1375
1376
1376 if l:
1377 if l:
1377 l.release()
1378 l.release()
1378 else:
1379 else:
1379 try:
1380 try:
1380 st = vfs.lstat(name)
1381 st = vfs.lstat(name)
1381 age = now - st[stat.ST_MTIME]
1382 age = now - st[stat.ST_MTIME]
1382 user = util.username(st.st_uid)
1383 user = util.username(st.st_uid)
1383 locker = vfs.readlock(name)
1384 locker = vfs.readlock(name)
1384 if ":" in locker:
1385 if ":" in locker:
1385 host, pid = locker.split(':')
1386 host, pid = locker.split(':')
1386 if host == socket.gethostname():
1387 if host == socket.gethostname():
1387 locker = 'user %s, process %s' % (user, pid)
1388 locker = 'user %s, process %s' % (user, pid)
1388 else:
1389 else:
1389 locker = 'user %s, process %s, host %s' \
1390 locker = 'user %s, process %s, host %s' \
1390 % (user, pid, host)
1391 % (user, pid, host)
1391 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1392 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1392 return 1
1393 return 1
1393 except OSError as e:
1394 except OSError as e:
1394 if e.errno != errno.ENOENT:
1395 if e.errno != errno.ENOENT:
1395 raise
1396 raise
1396
1397
1397 ui.write(("%-6s free\n") % (name + ":"))
1398 ui.write(("%-6s free\n") % (name + ":"))
1398 return 0
1399 return 0
1399
1400
1400 held += report(repo.svfs, "lock", repo.lock)
1401 held += report(repo.svfs, "lock", repo.lock)
1401 held += report(repo.vfs, "wlock", repo.wlock)
1402 held += report(repo.vfs, "wlock", repo.wlock)
1402
1403
1403 return held
1404 return held
1404
1405
1405 @command('debugmergestate', [], '')
1406 @command('debugmergestate', [], '')
1406 def debugmergestate(ui, repo, *args):
1407 def debugmergestate(ui, repo, *args):
1407 """print merge state
1408 """print merge state
1408
1409
1409 Use --verbose to print out information about whether v1 or v2 merge state
1410 Use --verbose to print out information about whether v1 or v2 merge state
1410 was chosen."""
1411 was chosen."""
1411 def _hashornull(h):
1412 def _hashornull(h):
1412 if h == nullhex:
1413 if h == nullhex:
1413 return 'null'
1414 return 'null'
1414 else:
1415 else:
1415 return h
1416 return h
1416
1417
1417 def printrecords(version):
1418 def printrecords(version):
1418 ui.write(('* version %d records\n') % version)
1419 ui.write(('* version %d records\n') % version)
1419 if version == 1:
1420 if version == 1:
1420 records = v1records
1421 records = v1records
1421 else:
1422 else:
1422 records = v2records
1423 records = v2records
1423
1424
1424 for rtype, record in records:
1425 for rtype, record in records:
1425 # pretty print some record types
1426 # pretty print some record types
1426 if rtype == 'L':
1427 if rtype == 'L':
1427 ui.write(('local: %s\n') % record)
1428 ui.write(('local: %s\n') % record)
1428 elif rtype == 'O':
1429 elif rtype == 'O':
1429 ui.write(('other: %s\n') % record)
1430 ui.write(('other: %s\n') % record)
1430 elif rtype == 'm':
1431 elif rtype == 'm':
1431 driver, mdstate = record.split('\0', 1)
1432 driver, mdstate = record.split('\0', 1)
1432 ui.write(('merge driver: %s (state "%s")\n')
1433 ui.write(('merge driver: %s (state "%s")\n')
1433 % (driver, mdstate))
1434 % (driver, mdstate))
1434 elif rtype in 'FDC':
1435 elif rtype in 'FDC':
1435 r = record.split('\0')
1436 r = record.split('\0')
1436 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1437 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1437 if version == 1:
1438 if version == 1:
1438 onode = 'not stored in v1 format'
1439 onode = 'not stored in v1 format'
1439 flags = r[7]
1440 flags = r[7]
1440 else:
1441 else:
1441 onode, flags = r[7:9]
1442 onode, flags = r[7:9]
1442 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1443 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1443 % (f, rtype, state, _hashornull(hash)))
1444 % (f, rtype, state, _hashornull(hash)))
1444 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1445 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1445 ui.write((' ancestor path: %s (node %s)\n')
1446 ui.write((' ancestor path: %s (node %s)\n')
1446 % (afile, _hashornull(anode)))
1447 % (afile, _hashornull(anode)))
1447 ui.write((' other path: %s (node %s)\n')
1448 ui.write((' other path: %s (node %s)\n')
1448 % (ofile, _hashornull(onode)))
1449 % (ofile, _hashornull(onode)))
1449 elif rtype == 'f':
1450 elif rtype == 'f':
1450 filename, rawextras = record.split('\0', 1)
1451 filename, rawextras = record.split('\0', 1)
1451 extras = rawextras.split('\0')
1452 extras = rawextras.split('\0')
1452 i = 0
1453 i = 0
1453 extrastrings = []
1454 extrastrings = []
1454 while i < len(extras):
1455 while i < len(extras):
1455 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1456 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1456 i += 2
1457 i += 2
1457
1458
1458 ui.write(('file extras: %s (%s)\n')
1459 ui.write(('file extras: %s (%s)\n')
1459 % (filename, ', '.join(extrastrings)))
1460 % (filename, ', '.join(extrastrings)))
1460 elif rtype == 'l':
1461 elif rtype == 'l':
1461 labels = record.split('\0', 2)
1462 labels = record.split('\0', 2)
1462 labels = [l for l in labels if len(l) > 0]
1463 labels = [l for l in labels if len(l) > 0]
1463 ui.write(('labels:\n'))
1464 ui.write(('labels:\n'))
1464 ui.write((' local: %s\n' % labels[0]))
1465 ui.write((' local: %s\n' % labels[0]))
1465 ui.write((' other: %s\n' % labels[1]))
1466 ui.write((' other: %s\n' % labels[1]))
1466 if len(labels) > 2:
1467 if len(labels) > 2:
1467 ui.write((' base: %s\n' % labels[2]))
1468 ui.write((' base: %s\n' % labels[2]))
1468 else:
1469 else:
1469 ui.write(('unrecognized entry: %s\t%s\n')
1470 ui.write(('unrecognized entry: %s\t%s\n')
1470 % (rtype, record.replace('\0', '\t')))
1471 % (rtype, record.replace('\0', '\t')))
1471
1472
1472 # Avoid mergestate.read() since it may raise an exception for unsupported
1473 # Avoid mergestate.read() since it may raise an exception for unsupported
1473 # merge state records. We shouldn't be doing this, but this is OK since this
1474 # merge state records. We shouldn't be doing this, but this is OK since this
1474 # command is pretty low-level.
1475 # command is pretty low-level.
1475 ms = mergemod.mergestate(repo)
1476 ms = mergemod.mergestate(repo)
1476
1477
1477 # sort so that reasonable information is on top
1478 # sort so that reasonable information is on top
1478 v1records = ms._readrecordsv1()
1479 v1records = ms._readrecordsv1()
1479 v2records = ms._readrecordsv2()
1480 v2records = ms._readrecordsv2()
1480 order = 'LOml'
1481 order = 'LOml'
1481 def key(r):
1482 def key(r):
1482 idx = order.find(r[0])
1483 idx = order.find(r[0])
1483 if idx == -1:
1484 if idx == -1:
1484 return (1, r[1])
1485 return (1, r[1])
1485 else:
1486 else:
1486 return (0, idx)
1487 return (0, idx)
1487 v1records.sort(key=key)
1488 v1records.sort(key=key)
1488 v2records.sort(key=key)
1489 v2records.sort(key=key)
1489
1490
1490 if not v1records and not v2records:
1491 if not v1records and not v2records:
1491 ui.write(('no merge state found\n'))
1492 ui.write(('no merge state found\n'))
1492 elif not v2records:
1493 elif not v2records:
1493 ui.note(('no version 2 merge state\n'))
1494 ui.note(('no version 2 merge state\n'))
1494 printrecords(1)
1495 printrecords(1)
1495 elif ms._v1v2match(v1records, v2records):
1496 elif ms._v1v2match(v1records, v2records):
1496 ui.note(('v1 and v2 states match: using v2\n'))
1497 ui.note(('v1 and v2 states match: using v2\n'))
1497 printrecords(2)
1498 printrecords(2)
1498 else:
1499 else:
1499 ui.note(('v1 and v2 states mismatch: using v1\n'))
1500 ui.note(('v1 and v2 states mismatch: using v1\n'))
1500 printrecords(1)
1501 printrecords(1)
1501 if ui.verbose:
1502 if ui.verbose:
1502 printrecords(2)
1503 printrecords(2)
1503
1504
1504 @command('debugnamecomplete', [], _('NAME...'))
1505 @command('debugnamecomplete', [], _('NAME...'))
1505 def debugnamecomplete(ui, repo, *args):
1506 def debugnamecomplete(ui, repo, *args):
1506 '''complete "names" - tags, open branch names, bookmark names'''
1507 '''complete "names" - tags, open branch names, bookmark names'''
1507
1508
1508 names = set()
1509 names = set()
1509 # since we previously only listed open branches, we will handle that
1510 # since we previously only listed open branches, we will handle that
1510 # specially (after this for loop)
1511 # specially (after this for loop)
1511 for name, ns in repo.names.iteritems():
1512 for name, ns in repo.names.iteritems():
1512 if name != 'branches':
1513 if name != 'branches':
1513 names.update(ns.listnames(repo))
1514 names.update(ns.listnames(repo))
1514 names.update(tag for (tag, heads, tip, closed)
1515 names.update(tag for (tag, heads, tip, closed)
1515 in repo.branchmap().iterbranches() if not closed)
1516 in repo.branchmap().iterbranches() if not closed)
1516 completions = set()
1517 completions = set()
1517 if not args:
1518 if not args:
1518 args = ['']
1519 args = ['']
1519 for a in args:
1520 for a in args:
1520 completions.update(n for n in names if n.startswith(a))
1521 completions.update(n for n in names if n.startswith(a))
1521 ui.write('\n'.join(sorted(completions)))
1522 ui.write('\n'.join(sorted(completions)))
1522 ui.write('\n')
1523 ui.write('\n')
1523
1524
1524 @command('debugobsolete',
1525 @command('debugobsolete',
1525 [('', 'flags', 0, _('markers flag')),
1526 [('', 'flags', 0, _('markers flag')),
1526 ('', 'record-parents', False,
1527 ('', 'record-parents', False,
1527 _('record parent information for the precursor')),
1528 _('record parent information for the precursor')),
1528 ('r', 'rev', [], _('display markers relevant to REV')),
1529 ('r', 'rev', [], _('display markers relevant to REV')),
1529 ('', 'exclusive', False, _('restrict display to markers only '
1530 ('', 'exclusive', False, _('restrict display to markers only '
1530 'relevant to REV')),
1531 'relevant to REV')),
1531 ('', 'index', False, _('display index of the marker')),
1532 ('', 'index', False, _('display index of the marker')),
1532 ('', 'delete', [], _('delete markers specified by indices')),
1533 ('', 'delete', [], _('delete markers specified by indices')),
1533 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1534 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1534 _('[OBSOLETED [REPLACEMENT ...]]'))
1535 _('[OBSOLETED [REPLACEMENT ...]]'))
1535 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1536 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1536 """create arbitrary obsolete marker
1537 """create arbitrary obsolete marker
1537
1538
1538 With no arguments, displays the list of obsolescence markers."""
1539 With no arguments, displays the list of obsolescence markers."""
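# Illustrative usage (hypothetical full-length hex nodes): marking one
# changeset as superseded by another,
#   hg debugobsolete <obsoleted-node> <replacement-node>
# while plain `hg debugobsolete` lists the existing markers.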
1539
1540
1540 opts = pycompat.byteskwargs(opts)
1541 opts = pycompat.byteskwargs(opts)
1541
1542
1542 def parsenodeid(s):
1543 def parsenodeid(s):
1543 try:
1544 try:
1544 # We do not use revsingle/revrange functions here to accept
1545 # We do not use revsingle/revrange functions here to accept
1545 # arbitrary node identifiers, possibly not present in the
1546 # arbitrary node identifiers, possibly not present in the
1546 # local repository.
1547 # local repository.
1547 n = bin(s)
1548 n = bin(s)
1548 if len(n) != len(nullid):
1549 if len(n) != len(nullid):
1549 raise TypeError()
1550 raise TypeError()
1550 return n
1551 return n
1551 except TypeError:
1552 except TypeError:
1552 raise error.Abort('changeset references must be full hexadecimal '
1553 raise error.Abort('changeset references must be full hexadecimal '
1553 'node identifiers')
1554 'node identifiers')
1554
1555
1555 if opts.get('delete'):
1556 if opts.get('delete'):
1556 indices = []
1557 indices = []
1557 for v in opts.get('delete'):
1558 for v in opts.get('delete'):
1558 try:
1559 try:
1559 indices.append(int(v))
1560 indices.append(int(v))
1560 except ValueError:
1561 except ValueError:
1561 raise error.Abort(_('invalid index value: %r') % v,
1562 raise error.Abort(_('invalid index value: %r') % v,
1562 hint=_('use integers for indices'))
1563 hint=_('use integers for indices'))
1563
1564
1564 if repo.currenttransaction():
1565 if repo.currenttransaction():
1565 raise error.Abort(_('cannot delete obsmarkers in the middle '
1566 raise error.Abort(_('cannot delete obsmarkers in the middle '
1566 'of a transaction.'))
1567 'of a transaction.'))
1567
1568
1568 with repo.lock():
1569 with repo.lock():
1569 n = repair.deleteobsmarkers(repo.obsstore, indices)
1570 n = repair.deleteobsmarkers(repo.obsstore, indices)
1570 ui.write(_('deleted %i obsolescence markers\n') % n)
1571 ui.write(_('deleted %i obsolescence markers\n') % n)
1571
1572
1572 return
1573 return
1573
1574
1574 if precursor is not None:
1575 if precursor is not None:
1575 if opts['rev']:
1576 if opts['rev']:
1576 raise error.Abort('cannot select revision when creating marker')
1577 raise error.Abort('cannot select revision when creating marker')
1577 metadata = {}
1578 metadata = {}
1578 metadata['user'] = opts['user'] or ui.username()
1579 metadata['user'] = opts['user'] or ui.username()
1579 succs = tuple(parsenodeid(succ) for succ in successors)
1580 succs = tuple(parsenodeid(succ) for succ in successors)
1580 l = repo.lock()
1581 l = repo.lock()
1581 try:
1582 try:
1582 tr = repo.transaction('debugobsolete')
1583 tr = repo.transaction('debugobsolete')
1583 try:
1584 try:
1584 date = opts.get('date')
1585 date = opts.get('date')
1585 if date:
1586 if date:
1586 date = dateutil.parsedate(date)
1587 date = dateutil.parsedate(date)
1587 else:
1588 else:
1588 date = None
1589 date = None
1589 prec = parsenodeid(precursor)
1590 prec = parsenodeid(precursor)
1590 parents = None
1591 parents = None
1591 if opts['record_parents']:
1592 if opts['record_parents']:
1592 if prec not in repo.unfiltered():
1593 if prec not in repo.unfiltered():
1593 raise error.Abort('cannot use --record-parents on '
1594 raise error.Abort('cannot use --record-parents on '
1594 'unknown changesets')
1595 'unknown changesets')
1595 parents = repo.unfiltered()[prec].parents()
1596 parents = repo.unfiltered()[prec].parents()
1596 parents = tuple(p.node() for p in parents)
1597 parents = tuple(p.node() for p in parents)
1597 repo.obsstore.create(tr, prec, succs, opts['flags'],
1598 repo.obsstore.create(tr, prec, succs, opts['flags'],
1598 parents=parents, date=date,
1599 parents=parents, date=date,
1599 metadata=metadata, ui=ui)
1600 metadata=metadata, ui=ui)
1600 tr.close()
1601 tr.close()
1601 except ValueError as exc:
1602 except ValueError as exc:
1602 raise error.Abort(_('bad obsmarker input: %s') %
1603 raise error.Abort(_('bad obsmarker input: %s') %
1603 pycompat.bytestr(exc))
1604 pycompat.bytestr(exc))
1604 finally:
1605 finally:
1605 tr.release()
1606 tr.release()
1606 finally:
1607 finally:
1607 l.release()
1608 l.release()
1608 else:
1609 else:
1609 if opts['rev']:
1610 if opts['rev']:
1610 revs = scmutil.revrange(repo, opts['rev'])
1611 revs = scmutil.revrange(repo, opts['rev'])
1611 nodes = [repo[r].node() for r in revs]
1612 nodes = [repo[r].node() for r in revs]
1612 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1613 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1613 exclusive=opts['exclusive']))
1614 exclusive=opts['exclusive']))
1614 markers.sort(key=lambda x: x._data)
1615 markers.sort(key=lambda x: x._data)
1615 else:
1616 else:
1616 markers = obsutil.getmarkers(repo)
1617 markers = obsutil.getmarkers(repo)
1617
1618
1618 markerstoiter = markers
1619 markerstoiter = markers
1619 isrelevant = lambda m: True
1620 isrelevant = lambda m: True
1620 if opts.get('rev') and opts.get('index'):
1621 if opts.get('rev') and opts.get('index'):
1621 markerstoiter = obsutil.getmarkers(repo)
1622 markerstoiter = obsutil.getmarkers(repo)
1622 markerset = set(markers)
1623 markerset = set(markers)
1623 isrelevant = lambda m: m in markerset
1624 isrelevant = lambda m: m in markerset
1624
1625
1625 fm = ui.formatter('debugobsolete', opts)
1626 fm = ui.formatter('debugobsolete', opts)
1626 for i, m in enumerate(markerstoiter):
1627 for i, m in enumerate(markerstoiter):
1627 if not isrelevant(m):
1628 if not isrelevant(m):
1628 # marker can be irrelevant when we're iterating over a set
1629 # marker can be irrelevant when we're iterating over a set
1629 # of markers (markerstoiter) which is bigger than the set
1630 # of markers (markerstoiter) which is bigger than the set
1630 # of markers we want to display (markers)
1631 # of markers we want to display (markers)
1631 # this can happen if both --index and --rev options are
1632 # this can happen if both --index and --rev options are
1632 # provided and thus we need to iterate over all of the markers
1633 # provided and thus we need to iterate over all of the markers
1633 # to get the correct indices, but only display the ones that
1634 # to get the correct indices, but only display the ones that
1634 # are relevant to --rev value
1635 # are relevant to --rev value
1635 continue
1636 continue
1636 fm.startitem()
1637 fm.startitem()
1637 ind = i if opts.get('index') else None
1638 ind = i if opts.get('index') else None
1638 cmdutil.showmarker(fm, m, index=ind)
1639 cmdutil.showmarker(fm, m, index=ind)
1639 fm.end()
1640 fm.end()
1640
1641
1641 @command('debugpathcomplete',
1642 @command('debugpathcomplete',
1642 [('f', 'full', None, _('complete an entire path')),
1643 [('f', 'full', None, _('complete an entire path')),
1643 ('n', 'normal', None, _('show only normal files')),
1644 ('n', 'normal', None, _('show only normal files')),
1644 ('a', 'added', None, _('show only added files')),
1645 ('a', 'added', None, _('show only added files')),
1645 ('r', 'removed', None, _('show only removed files'))],
1646 ('r', 'removed', None, _('show only removed files'))],
1646 _('FILESPEC...'))
1647 _('FILESPEC...'))
1647 def debugpathcomplete(ui, repo, *specs, **opts):
1648 def debugpathcomplete(ui, repo, *specs, **opts):
1648 '''complete part or all of a tracked path
1649 '''complete part or all of a tracked path
1649
1650
1650 This command supports shells that offer path name completion. It
1651 This command supports shells that offer path name completion. It
1651 currently completes only files already known to the dirstate.
1652 currently completes only files already known to the dirstate.
1652
1653
1653 Completion extends only to the next path segment unless
1654 Completion extends only to the next path segment unless
1654 --full is specified, in which case entire paths are used.'''
1655 --full is specified, in which case entire paths are used.'''
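# Illustrative usage (hypothetical paths): `hg debugpathcomplete src/ma` might
# emit "src/main.c" for a tracked file, or stop at the next directory segment
# ("src/main") unless --full is given, in which case entire paths are printed.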
1655
1656
1656 def complete(path, acceptable):
1657 def complete(path, acceptable):
1657 dirstate = repo.dirstate
1658 dirstate = repo.dirstate
1658 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1659 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1659 rootdir = repo.root + pycompat.ossep
1660 rootdir = repo.root + pycompat.ossep
1660 if spec != repo.root and not spec.startswith(rootdir):
1661 if spec != repo.root and not spec.startswith(rootdir):
1661 return [], []
1662 return [], []
1662 if os.path.isdir(spec):
1663 if os.path.isdir(spec):
1663 spec += '/'
1664 spec += '/'
1664 spec = spec[len(rootdir):]
1665 spec = spec[len(rootdir):]
1665 fixpaths = pycompat.ossep != '/'
1666 fixpaths = pycompat.ossep != '/'
1666 if fixpaths:
1667 if fixpaths:
1667 spec = spec.replace(pycompat.ossep, '/')
1668 spec = spec.replace(pycompat.ossep, '/')
1668 speclen = len(spec)
1669 speclen = len(spec)
1669 fullpaths = opts[r'full']
1670 fullpaths = opts[r'full']
1670 files, dirs = set(), set()
1671 files, dirs = set(), set()
1671 adddir, addfile = dirs.add, files.add
1672 adddir, addfile = dirs.add, files.add
1672 for f, st in dirstate.iteritems():
1673 for f, st in dirstate.iteritems():
1673 if f.startswith(spec) and st[0] in acceptable:
1674 if f.startswith(spec) and st[0] in acceptable:
1674 if fixpaths:
1675 if fixpaths:
1675 f = f.replace('/', pycompat.ossep)
1676 f = f.replace('/', pycompat.ossep)
1676 if fullpaths:
1677 if fullpaths:
1677 addfile(f)
1678 addfile(f)
1678 continue
1679 continue
1679 s = f.find(pycompat.ossep, speclen)
1680 s = f.find(pycompat.ossep, speclen)
1680 if s >= 0:
1681 if s >= 0:
1681 adddir(f[:s])
1682 adddir(f[:s])
1682 else:
1683 else:
1683 addfile(f)
1684 addfile(f)
1684 return files, dirs
1685 return files, dirs
1685
1686
1686 acceptable = ''
1687 acceptable = ''
1687 if opts[r'normal']:
1688 if opts[r'normal']:
1688 acceptable += 'nm'
1689 acceptable += 'nm'
1689 if opts[r'added']:
1690 if opts[r'added']:
1690 acceptable += 'a'
1691 acceptable += 'a'
1691 if opts[r'removed']:
1692 if opts[r'removed']:
1692 acceptable += 'r'
1693 acceptable += 'r'
1693 cwd = repo.getcwd()
1694 cwd = repo.getcwd()
1694 if not specs:
1695 if not specs:
1695 specs = ['.']
1696 specs = ['.']
1696
1697
1697 files, dirs = set(), set()
1698 files, dirs = set(), set()
1698 for spec in specs:
1699 for spec in specs:
1699 f, d = complete(spec, acceptable or 'nmar')
1700 f, d = complete(spec, acceptable or 'nmar')
1700 files.update(f)
1701 files.update(f)
1701 dirs.update(d)
1702 dirs.update(d)
1702 files.update(dirs)
1703 files.update(dirs)
1703 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1704 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1704 ui.write('\n')
1705 ui.write('\n')
1705
1706
1706 @command('debugpeer', [], _('PATH'), norepo=True)
1707 @command('debugpeer', [], _('PATH'), norepo=True)
1707 def debugpeer(ui, path):
1708 def debugpeer(ui, path):
1708 """establish a connection to a peer repository"""
1709 """establish a connection to a peer repository"""
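# Illustrative usage (hypothetical URL): `hg debugpeer http://example.com/repo`
# prints the peer url followed by "local: no" and "pushable: yes" or "no",
# depending on what the peer reports.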
1709 # Always enable peer request logging. Requires --debug to display
1710 # Always enable peer request logging. Requires --debug to display
1710 # though.
1711 # though.
1711 overrides = {
1712 overrides = {
1712 ('devel', 'debug.peer-request'): True,
1713 ('devel', 'debug.peer-request'): True,
1713 }
1714 }
1714
1715
1715 with ui.configoverride(overrides):
1716 with ui.configoverride(overrides):
1716 peer = hg.peer(ui, {}, path)
1717 peer = hg.peer(ui, {}, path)
1717
1718
1718 local = peer.local() is not None
1719 local = peer.local() is not None
1719 canpush = peer.canpush()
1720 canpush = peer.canpush()
1720
1721
1721 ui.write(_('url: %s\n') % peer.url())
1722 ui.write(_('url: %s\n') % peer.url())
1722 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1723 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1723 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1724 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1724
1725
1725 @command('debugpickmergetool',
1726 @command('debugpickmergetool',
1726 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1727 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1727 ('', 'changedelete', None, _('emulate merging change and delete')),
1728 ('', 'changedelete', None, _('emulate merging change and delete')),
1728 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1729 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1729 _('[PATTERN]...'),
1730 _('[PATTERN]...'),
1730 inferrepo=True)
1731 inferrepo=True)
1731 def debugpickmergetool(ui, repo, *pats, **opts):
1732 def debugpickmergetool(ui, repo, *pats, **opts):
1732 """examine which merge tool is chosen for the specified file
1733 """examine which merge tool is chosen for the specified file
1733
1734
1734 As described in :hg:`help merge-tools`, Mercurial examines
1735 As described in :hg:`help merge-tools`, Mercurial examines
1735 the configurations below in this order to decide which merge tool is
1736 the configurations below in this order to decide which merge tool is
1736 chosen for the specified file.
1737 chosen for the specified file.
1737
1738
1738 1. ``--tool`` option
1739 1. ``--tool`` option
1739 2. ``HGMERGE`` environment variable
1740 2. ``HGMERGE`` environment variable
1740 3. configurations in ``merge-patterns`` section
1741 3. configurations in ``merge-patterns`` section
1741 4. configuration of ``ui.merge``
1742 4. configuration of ``ui.merge``
1742 5. configurations in ``merge-tools`` section
1743 5. configurations in ``merge-tools`` section
1743 6. ``hgmerge`` tool (for historical reasons only)
1744 6. ``hgmerge`` tool (for historical reasons only)
1744 7. default tool for fallback (``:merge`` or ``:prompt``)
1745 7. default tool for fallback (``:merge`` or ``:prompt``)
1745
1746
1746 This command writes out the examination result in the style below::
1747 This command writes out the examination result in the style below::
1747
1748
1748 FILE = MERGETOOL
1749 FILE = MERGETOOL
1749
1750
1750 By default, all files known in the first parent context of the
1751 By default, all files known in the first parent context of the
1751 working directory are examined. Use file patterns and/or -I/-X
1752 working directory are examined. Use file patterns and/or -I/-X
1752 options to limit target files. -r/--rev is also useful to examine
1753 options to limit target files. -r/--rev is also useful to examine
1753 files in another context without actually updating to it.
1754 files in another context without actually updating to it.
1754
1755
1755 With --debug, this command shows warning messages while matching
1756 With --debug, this command shows warning messages while matching
1756 against ``merge-patterns`` and so on, too. It is recommended to
1757 against ``merge-patterns`` and so on, too. It is recommended to
1757 use this option with explicit file patterns and/or -I/-X options,
1758 use this option with explicit file patterns and/or -I/-X options,
1758 because this option increases the amount of output per file according
1759 because this option increases the amount of output per file according
1759 to the configurations in hgrc.
1760 to the configurations in hgrc.
1760
1761
1761 With -v/--verbose, this command first shows the configurations below
1762 With -v/--verbose, this command first shows the configurations below
1762 (only if they are specified).
1763 (only if they are specified).
1763
1764
1764 - ``--tool`` option
1765 - ``--tool`` option
1765 - ``HGMERGE`` environment variable
1766 - ``HGMERGE`` environment variable
1766 - configuration of ``ui.merge``
1767 - configuration of ``ui.merge``
1767
1768
1768 If a merge tool is chosen before matching against
1769 If a merge tool is chosen before matching against
1769 ``merge-patterns``, this command can't show any helpful
1770 ``merge-patterns``, this command can't show any helpful
1770 information, even with --debug. In such a case, the information above
1771 information, even with --debug. In such a case, the information above
1771 is useful for understanding why a merge tool was chosen.
1772 is useful for understanding why a merge tool was chosen.
1772 """
1773 """
1773 opts = pycompat.byteskwargs(opts)
1774 opts = pycompat.byteskwargs(opts)
1774 overrides = {}
1775 overrides = {}
1775 if opts['tool']:
1776 if opts['tool']:
1776 overrides[('ui', 'forcemerge')] = opts['tool']
1777 overrides[('ui', 'forcemerge')] = opts['tool']
1777 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1778 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1778
1779
1779 with ui.configoverride(overrides, 'debugmergepatterns'):
1780 with ui.configoverride(overrides, 'debugmergepatterns'):
1780 hgmerge = encoding.environ.get("HGMERGE")
1781 hgmerge = encoding.environ.get("HGMERGE")
1781 if hgmerge is not None:
1782 if hgmerge is not None:
1782 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1783 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1783 uimerge = ui.config("ui", "merge")
1784 uimerge = ui.config("ui", "merge")
1784 if uimerge:
1785 if uimerge:
1785 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1786 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1786
1787
1787 ctx = scmutil.revsingle(repo, opts.get('rev'))
1788 ctx = scmutil.revsingle(repo, opts.get('rev'))
1788 m = scmutil.match(ctx, pats, opts)
1789 m = scmutil.match(ctx, pats, opts)
1789 changedelete = opts['changedelete']
1790 changedelete = opts['changedelete']
1790 for path in ctx.walk(m):
1791 for path in ctx.walk(m):
1791 fctx = ctx[path]
1792 fctx = ctx[path]
1792 try:
1793 try:
1793 if not ui.debugflag:
1794 if not ui.debugflag:
1794 ui.pushbuffer(error=True)
1795 ui.pushbuffer(error=True)
1795 tool, toolpath = filemerge._picktool(repo, ui, path,
1796 tool, toolpath = filemerge._picktool(repo, ui, path,
1796 fctx.isbinary(),
1797 fctx.isbinary(),
1797 'l' in fctx.flags(),
1798 'l' in fctx.flags(),
1798 changedelete)
1799 changedelete)
1799 finally:
1800 finally:
1800 if not ui.debugflag:
1801 if not ui.debugflag:
1801 ui.popbuffer()
1802 ui.popbuffer()
1802 ui.write(('%s = %s\n') % (path, tool))
1803 ui.write(('%s = %s\n') % (path, tool))
1803
1804
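# Illustrative sketch (not Mercurial's implementation): the precedence
# order documented for debugpickmergetool above boils down to "the first
# configured source wins". The function name and the (source, value)
# pairs below are hypothetical.
def _pickfirstconfigured_sketch(candidates):
    """Return (source, value) for the first candidate that has a value.

    ``candidates`` is an ordered list such as::

        [('--tool', None),
         ('HGMERGE', None),
         ('merge-patterns', 'kdiff3'),
         ('ui.merge', None),
         ('merge-tools', 'vimdiff')]
    """
    for source, value in candidates:
        if value:
            return source, value
    return 'fallback', ':merge'      # roughly step 7 of the list above

# With the data from the docstring, the result is ('merge-patterns', 'kdiff3').
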
1804 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1805 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1805 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1806 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1806 '''access the pushkey key/value protocol
1807 '''access the pushkey key/value protocol
1807
1808
1808 With two args, list the keys in the given namespace.
1809 With two args, list the keys in the given namespace.
1809
1810
1810 With five args, set a key to new if it is currently set to old.
1811 With five args, set a key to new if it is currently set to old.
1811 Reports success or failure.
1812 Reports success or failure.
1812 '''
1813 '''
1813
1814
1814 target = hg.peer(ui, {}, repopath)
1815 target = hg.peer(ui, {}, repopath)
1815 if keyinfo:
1816 if keyinfo:
1816 key, old, new = keyinfo
1817 key, old, new = keyinfo
1817 r = target.pushkey(namespace, key, old, new)
1818 r = target.pushkey(namespace, key, old, new)
1818 ui.status(pycompat.bytestr(r) + '\n')
1819 ui.status(pycompat.bytestr(r) + '\n')
1819 return not r
1820 return not r
1820 else:
1821 else:
1821 for k, v in sorted(target.listkeys(namespace).iteritems()):
1822 for k, v in sorted(target.listkeys(namespace).iteritems()):
1822 ui.write("%s\t%s\n" % (util.escapestr(k),
1823 ui.write("%s\t%s\n" % (util.escapestr(k),
1823 util.escapestr(v)))
1824 util.escapestr(v)))
1824
1825
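# Illustrative sketch (an in-memory stand-in, not the wire protocol): the
# five-argument form of debugpushkey above is a compare-and-swap. The
# namespace maps keys to values, and the write succeeds only when the
# current value still equals ``old``. Treating a missing key as '' is an
# assumption of this sketch.
def _pushkey_cas_sketch(namespace, key, old, new):
    if namespace.get(key, '') != old:
        return False                 # somebody else changed it first
    namespace[key] = new
    return True                      # mirrors the boolean the command prints

# ns = {'mykey': 'cafebabe'}
# _pushkey_cas_sketch(ns, 'mykey', 'cafebabe', 'deadbeef')  -> True
# _pushkey_cas_sketch(ns, 'mykey', 'cafebabe', '00000000')  -> False (old is stale)
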
1825 @command('debugpvec', [], _('A B'))
1826 @command('debugpvec', [], _('A B'))
1826 def debugpvec(ui, repo, a, b=None):
1827 def debugpvec(ui, repo, a, b=None):
1827 ca = scmutil.revsingle(repo, a)
1828 ca = scmutil.revsingle(repo, a)
1828 cb = scmutil.revsingle(repo, b)
1829 cb = scmutil.revsingle(repo, b)
1829 pa = pvec.ctxpvec(ca)
1830 pa = pvec.ctxpvec(ca)
1830 pb = pvec.ctxpvec(cb)
1831 pb = pvec.ctxpvec(cb)
1831 if pa == pb:
1832 if pa == pb:
1832 rel = "="
1833 rel = "="
1833 elif pa > pb:
1834 elif pa > pb:
1834 rel = ">"
1835 rel = ">"
1835 elif pa < pb:
1836 elif pa < pb:
1836 rel = "<"
1837 rel = "<"
1837 elif pa | pb:
1838 elif pa | pb:
1838 rel = "|"
1839 rel = "|"
1839 ui.write(_("a: %s\n") % pa)
1840 ui.write(_("a: %s\n") % pa)
1840 ui.write(_("b: %s\n") % pb)
1841 ui.write(_("b: %s\n") % pb)
1841 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1842 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1842 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1843 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1843 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1844 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1844 pa.distance(pb), rel))
1845 pa.distance(pb), rel))
1845
1846
1846 @command('debugrebuilddirstate|debugrebuildstate',
1847 @command('debugrebuilddirstate|debugrebuildstate',
1847 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1848 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1848 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1849 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1849 'the working copy parent')),
1850 'the working copy parent')),
1850 ],
1851 ],
1851 _('[-r REV]'))
1852 _('[-r REV]'))
1852 def debugrebuilddirstate(ui, repo, rev, **opts):
1853 def debugrebuilddirstate(ui, repo, rev, **opts):
1853 """rebuild the dirstate as it would look like for the given revision
1854 """rebuild the dirstate as it would look like for the given revision
1854
1855
1855 If no revision is specified, the first current parent will be used.
1856 If no revision is specified, the first current parent will be used.
1856
1857
1857 The dirstate will be set to the files of the given revision.
1858 The dirstate will be set to the files of the given revision.
1858 The actual working directory content or existing dirstate
1859 The actual working directory content or existing dirstate
1859 information such as adds or removes is not considered.
1860 information such as adds or removes is not considered.
1860
1861
1861 ``minimal`` will only rebuild the dirstate status for files that claim to be
1862 ``minimal`` will only rebuild the dirstate status for files that claim to be
1862 tracked but are not in the parent manifest, or that exist in the parent
1863 tracked but are not in the parent manifest, or that exist in the parent
1863 manifest but are not in the dirstate. It will not change adds, removes, or
1864 manifest but are not in the dirstate. It will not change adds, removes, or
1864 modified files that are in the working copy parent.
1865 modified files that are in the working copy parent.
1865
1866
1866 One use of this command is to make the next :hg:`status` invocation
1867 One use of this command is to make the next :hg:`status` invocation
1867 check the actual file content.
1868 check the actual file content.
1868 """
1869 """
1869 ctx = scmutil.revsingle(repo, rev)
1870 ctx = scmutil.revsingle(repo, rev)
1870 with repo.wlock():
1871 with repo.wlock():
1871 dirstate = repo.dirstate
1872 dirstate = repo.dirstate
1872 changedfiles = None
1873 changedfiles = None
1873 # See command doc for what minimal does.
1874 # See command doc for what minimal does.
1874 if opts.get(r'minimal'):
1875 if opts.get(r'minimal'):
1875 manifestfiles = set(ctx.manifest().keys())
1876 manifestfiles = set(ctx.manifest().keys())
1876 dirstatefiles = set(dirstate)
1877 dirstatefiles = set(dirstate)
1877 manifestonly = manifestfiles - dirstatefiles
1878 manifestonly = manifestfiles - dirstatefiles
1878 dsonly = dirstatefiles - manifestfiles
1879 dsonly = dirstatefiles - manifestfiles
1879 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1880 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1880 changedfiles = manifestonly | dsnotadded
1881 changedfiles = manifestonly | dsnotadded
1881
1882
1882 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1883 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1883
1884
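# Illustrative sketch: the ``--minimal`` selection above is plain set
# arithmetic. Given the files in the target manifest and a hypothetical
# mapping of dirstate filename -> state character ('n', 'a', 'r', 'm'),
# only the mismatched entries need rebuilding.
def _minimalrebuild_sketch(manifestfiles, dirstatestates):
    manifestfiles = set(manifestfiles)
    dirstatefiles = set(dirstatestates)
    manifestonly = manifestfiles - dirstatefiles
    dsonly = dirstatefiles - manifestfiles
    dsnotadded = set(f for f in dsonly if dirstatestates[f] != 'a')
    return manifestonly | dsnotadded

# _minimalrebuild_sketch(['a', 'b'], {'b': 'n', 'c': 'a', 'd': 'r'})
# -> {'a', 'd'}   ('c' is newly added, so it is left alone)
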
1884 @command('debugrebuildfncache', [], '')
1885 @command('debugrebuildfncache', [], '')
1885 def debugrebuildfncache(ui, repo):
1886 def debugrebuildfncache(ui, repo):
1886 """rebuild the fncache file"""
1887 """rebuild the fncache file"""
1887 repair.rebuildfncache(ui, repo)
1888 repair.rebuildfncache(ui, repo)
1888
1889
1889 @command('debugrename',
1890 @command('debugrename',
1890 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1891 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1891 _('[-r REV] FILE'))
1892 _('[-r REV] FILE'))
1892 def debugrename(ui, repo, file1, *pats, **opts):
1893 def debugrename(ui, repo, file1, *pats, **opts):
1893 """dump rename information"""
1894 """dump rename information"""
1894
1895
1895 opts = pycompat.byteskwargs(opts)
1896 opts = pycompat.byteskwargs(opts)
1896 ctx = scmutil.revsingle(repo, opts.get('rev'))
1897 ctx = scmutil.revsingle(repo, opts.get('rev'))
1897 m = scmutil.match(ctx, (file1,) + pats, opts)
1898 m = scmutil.match(ctx, (file1,) + pats, opts)
1898 for abs in ctx.walk(m):
1899 for abs in ctx.walk(m):
1899 fctx = ctx[abs]
1900 fctx = ctx[abs]
1900 o = fctx.filelog().renamed(fctx.filenode())
1901 o = fctx.filelog().renamed(fctx.filenode())
1901 rel = m.rel(abs)
1902 rel = m.rel(abs)
1902 if o:
1903 if o:
1903 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1904 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1904 else:
1905 else:
1905 ui.write(_("%s not renamed\n") % rel)
1906 ui.write(_("%s not renamed\n") % rel)
1906
1907
1907 @command('debugrevlog', cmdutil.debugrevlogopts +
1908 @command('debugrevlog', cmdutil.debugrevlogopts +
1908 [('d', 'dump', False, _('dump index data'))],
1909 [('d', 'dump', False, _('dump index data'))],
1909 _('-c|-m|FILE'),
1910 _('-c|-m|FILE'),
1910 optionalrepo=True)
1911 optionalrepo=True)
1911 def debugrevlog(ui, repo, file_=None, **opts):
1912 def debugrevlog(ui, repo, file_=None, **opts):
1912 """show data and statistics about a revlog"""
1913 """show data and statistics about a revlog"""
1913 opts = pycompat.byteskwargs(opts)
1914 opts = pycompat.byteskwargs(opts)
1914 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1915 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1915
1916
1916 if opts.get("dump"):
1917 if opts.get("dump"):
1917 numrevs = len(r)
1918 numrevs = len(r)
1918 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1919 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1919 " rawsize totalsize compression heads chainlen\n"))
1920 " rawsize totalsize compression heads chainlen\n"))
1920 ts = 0
1921 ts = 0
1921 heads = set()
1922 heads = set()
1922
1923
1923 for rev in xrange(numrevs):
1924 for rev in xrange(numrevs):
1924 dbase = r.deltaparent(rev)
1925 dbase = r.deltaparent(rev)
1925 if dbase == -1:
1926 if dbase == -1:
1926 dbase = rev
1927 dbase = rev
1927 cbase = r.chainbase(rev)
1928 cbase = r.chainbase(rev)
1928 clen = r.chainlen(rev)
1929 clen = r.chainlen(rev)
1929 p1, p2 = r.parentrevs(rev)
1930 p1, p2 = r.parentrevs(rev)
1930 rs = r.rawsize(rev)
1931 rs = r.rawsize(rev)
1931 ts = ts + rs
1932 ts = ts + rs
1932 heads -= set(r.parentrevs(rev))
1933 heads -= set(r.parentrevs(rev))
1933 heads.add(rev)
1934 heads.add(rev)
1934 try:
1935 try:
1935 compression = ts / r.end(rev)
1936 compression = ts / r.end(rev)
1936 except ZeroDivisionError:
1937 except ZeroDivisionError:
1937 compression = 0
1938 compression = 0
1938 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1939 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1939 "%11d %5d %8d\n" %
1940 "%11d %5d %8d\n" %
1940 (rev, p1, p2, r.start(rev), r.end(rev),
1941 (rev, p1, p2, r.start(rev), r.end(rev),
1941 r.start(dbase), r.start(cbase),
1942 r.start(dbase), r.start(cbase),
1942 r.start(p1), r.start(p2),
1943 r.start(p1), r.start(p2),
1943 rs, ts, compression, len(heads), clen))
1944 rs, ts, compression, len(heads), clen))
1944 return 0
1945 return 0
1945
1946
1946 v = r.version
1947 v = r.version
1947 format = v & 0xFFFF
1948 format = v & 0xFFFF
1948 flags = []
1949 flags = []
1949 gdelta = False
1950 gdelta = False
1950 if v & revlog.FLAG_INLINE_DATA:
1951 if v & revlog.FLAG_INLINE_DATA:
1951 flags.append('inline')
1952 flags.append('inline')
1952 if v & revlog.FLAG_GENERALDELTA:
1953 if v & revlog.FLAG_GENERALDELTA:
1953 gdelta = True
1954 gdelta = True
1954 flags.append('generaldelta')
1955 flags.append('generaldelta')
1955 if not flags:
1956 if not flags:
1956 flags = ['(none)']
1957 flags = ['(none)']
1957
1958
1958 nummerges = 0
1959 nummerges = 0
1959 numfull = 0
1960 numfull = 0
1960 numprev = 0
1961 numprev = 0
1961 nump1 = 0
1962 nump1 = 0
1962 nump2 = 0
1963 nump2 = 0
1963 numother = 0
1964 numother = 0
1964 nump1prev = 0
1965 nump1prev = 0
1965 nump2prev = 0
1966 nump2prev = 0
1966 chainlengths = []
1967 chainlengths = []
1967 chainbases = []
1968 chainbases = []
1968 chainspans = []
1969 chainspans = []
1969
1970
1970 datasize = [None, 0, 0]
1971 datasize = [None, 0, 0]
1971 fullsize = [None, 0, 0]
1972 fullsize = [None, 0, 0]
1972 deltasize = [None, 0, 0]
1973 deltasize = [None, 0, 0]
1973 chunktypecounts = {}
1974 chunktypecounts = {}
1974 chunktypesizes = {}
1975 chunktypesizes = {}
1975
1976
1976 def addsize(size, l):
1977 def addsize(size, l):
1977 if l[0] is None or size < l[0]:
1978 if l[0] is None or size < l[0]:
1978 l[0] = size
1979 l[0] = size
1979 if size > l[1]:
1980 if size > l[1]:
1980 l[1] = size
1981 l[1] = size
1981 l[2] += size
1982 l[2] += size
1982
1983
1983 numrevs = len(r)
1984 numrevs = len(r)
1984 for rev in xrange(numrevs):
1985 for rev in xrange(numrevs):
1985 p1, p2 = r.parentrevs(rev)
1986 p1, p2 = r.parentrevs(rev)
1986 delta = r.deltaparent(rev)
1987 delta = r.deltaparent(rev)
1987 if format > 0:
1988 if format > 0:
1988 addsize(r.rawsize(rev), datasize)
1989 addsize(r.rawsize(rev), datasize)
1989 if p2 != nullrev:
1990 if p2 != nullrev:
1990 nummerges += 1
1991 nummerges += 1
1991 size = r.length(rev)
1992 size = r.length(rev)
1992 if delta == nullrev:
1993 if delta == nullrev:
1993 chainlengths.append(0)
1994 chainlengths.append(0)
1994 chainbases.append(r.start(rev))
1995 chainbases.append(r.start(rev))
1995 chainspans.append(size)
1996 chainspans.append(size)
1996 numfull += 1
1997 numfull += 1
1997 addsize(size, fullsize)
1998 addsize(size, fullsize)
1998 else:
1999 else:
1999 chainlengths.append(chainlengths[delta] + 1)
2000 chainlengths.append(chainlengths[delta] + 1)
2000 baseaddr = chainbases[delta]
2001 baseaddr = chainbases[delta]
2001 revaddr = r.start(rev)
2002 revaddr = r.start(rev)
2002 chainbases.append(baseaddr)
2003 chainbases.append(baseaddr)
2003 chainspans.append((revaddr - baseaddr) + size)
2004 chainspans.append((revaddr - baseaddr) + size)
2004 addsize(size, deltasize)
2005 addsize(size, deltasize)
2005 if delta == rev - 1:
2006 if delta == rev - 1:
2006 numprev += 1
2007 numprev += 1
2007 if delta == p1:
2008 if delta == p1:
2008 nump1prev += 1
2009 nump1prev += 1
2009 elif delta == p2:
2010 elif delta == p2:
2010 nump2prev += 1
2011 nump2prev += 1
2011 elif delta == p1:
2012 elif delta == p1:
2012 nump1 += 1
2013 nump1 += 1
2013 elif delta == p2:
2014 elif delta == p2:
2014 nump2 += 1
2015 nump2 += 1
2015 elif delta != nullrev:
2016 elif delta != nullrev:
2016 numother += 1
2017 numother += 1
2017
2018
2018 # Obtain data on the raw chunks in the revlog.
2019 # Obtain data on the raw chunks in the revlog.
2019 segment = r._getsegmentforrevs(rev, rev)[1]
2020 segment = r._getsegmentforrevs(rev, rev)[1]
2020 if segment:
2021 if segment:
2021 chunktype = bytes(segment[0:1])
2022 chunktype = bytes(segment[0:1])
2022 else:
2023 else:
2023 chunktype = 'empty'
2024 chunktype = 'empty'
2024
2025
2025 if chunktype not in chunktypecounts:
2026 if chunktype not in chunktypecounts:
2026 chunktypecounts[chunktype] = 0
2027 chunktypecounts[chunktype] = 0
2027 chunktypesizes[chunktype] = 0
2028 chunktypesizes[chunktype] = 0
2028
2029
2029 chunktypecounts[chunktype] += 1
2030 chunktypecounts[chunktype] += 1
2030 chunktypesizes[chunktype] += size
2031 chunktypesizes[chunktype] += size
2031
2032
2032 # Adjust size min value for empty cases
2033 # Adjust size min value for empty cases
2033 for size in (datasize, fullsize, deltasize):
2034 for size in (datasize, fullsize, deltasize):
2034 if size[0] is None:
2035 if size[0] is None:
2035 size[0] = 0
2036 size[0] = 0
2036
2037
2037 numdeltas = numrevs - numfull
2038 numdeltas = numrevs - numfull
2038 numoprev = numprev - nump1prev - nump2prev
2039 numoprev = numprev - nump1prev - nump2prev
2039 totalrawsize = datasize[2]
2040 totalrawsize = datasize[2]
2040 datasize[2] /= numrevs
2041 datasize[2] /= numrevs
2041 fulltotal = fullsize[2]
2042 fulltotal = fullsize[2]
2042 fullsize[2] /= numfull
2043 fullsize[2] /= numfull
2043 deltatotal = deltasize[2]
2044 deltatotal = deltasize[2]
2044 if numrevs - numfull > 0:
2045 if numrevs - numfull > 0:
2045 deltasize[2] /= numrevs - numfull
2046 deltasize[2] /= numrevs - numfull
2046 totalsize = fulltotal + deltatotal
2047 totalsize = fulltotal + deltatotal
2047 avgchainlen = sum(chainlengths) / numrevs
2048 avgchainlen = sum(chainlengths) / numrevs
2048 maxchainlen = max(chainlengths)
2049 maxchainlen = max(chainlengths)
2049 maxchainspan = max(chainspans)
2050 maxchainspan = max(chainspans)
2050 compratio = 1
2051 compratio = 1
2051 if totalsize:
2052 if totalsize:
2052 compratio = totalrawsize / totalsize
2053 compratio = totalrawsize / totalsize
2053
2054
2054 basedfmtstr = '%%%dd\n'
2055 basedfmtstr = '%%%dd\n'
2055 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2056 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2056
2057
2057 def dfmtstr(max):
2058 def dfmtstr(max):
2058 return basedfmtstr % len(str(max))
2059 return basedfmtstr % len(str(max))
2059 def pcfmtstr(max, padding=0):
2060 def pcfmtstr(max, padding=0):
2060 return basepcfmtstr % (len(str(max)), ' ' * padding)
2061 return basepcfmtstr % (len(str(max)), ' ' * padding)
2061
2062
2062 def pcfmt(value, total):
2063 def pcfmt(value, total):
2063 if total:
2064 if total:
2064 return (value, 100 * float(value) / total)
2065 return (value, 100 * float(value) / total)
2065 else:
2066 else:
2066 return value, 100.0
2067 return value, 100.0
2067
2068
2068 ui.write(('format : %d\n') % format)
2069 ui.write(('format : %d\n') % format)
2069 ui.write(('flags : %s\n') % ', '.join(flags))
2070 ui.write(('flags : %s\n') % ', '.join(flags))
2070
2071
2071 ui.write('\n')
2072 ui.write('\n')
2072 fmt = pcfmtstr(totalsize)
2073 fmt = pcfmtstr(totalsize)
2073 fmt2 = dfmtstr(totalsize)
2074 fmt2 = dfmtstr(totalsize)
2074 ui.write(('revisions : ') + fmt2 % numrevs)
2075 ui.write(('revisions : ') + fmt2 % numrevs)
2075 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2076 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2076 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2077 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2077 ui.write(('revisions : ') + fmt2 % numrevs)
2078 ui.write(('revisions : ') + fmt2 % numrevs)
2078 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2079 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2079 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2080 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2080 ui.write(('revision size : ') + fmt2 % totalsize)
2081 ui.write(('revision size : ') + fmt2 % totalsize)
2081 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2082 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2082 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2083 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2083
2084
2084 def fmtchunktype(chunktype):
2085 def fmtchunktype(chunktype):
2085 if chunktype == 'empty':
2086 if chunktype == 'empty':
2086 return ' %s : ' % chunktype
2087 return ' %s : ' % chunktype
2087 elif chunktype in pycompat.bytestr(string.ascii_letters):
2088 elif chunktype in pycompat.bytestr(string.ascii_letters):
2088 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2089 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2089 else:
2090 else:
2090 return ' 0x%s : ' % hex(chunktype)
2091 return ' 0x%s : ' % hex(chunktype)
2091
2092
2092 ui.write('\n')
2093 ui.write('\n')
2093 ui.write(('chunks : ') + fmt2 % numrevs)
2094 ui.write(('chunks : ') + fmt2 % numrevs)
2094 for chunktype in sorted(chunktypecounts):
2095 for chunktype in sorted(chunktypecounts):
2095 ui.write(fmtchunktype(chunktype))
2096 ui.write(fmtchunktype(chunktype))
2096 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2097 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2097 ui.write(('chunks size : ') + fmt2 % totalsize)
2098 ui.write(('chunks size : ') + fmt2 % totalsize)
2098 for chunktype in sorted(chunktypecounts):
2099 for chunktype in sorted(chunktypecounts):
2099 ui.write(fmtchunktype(chunktype))
2100 ui.write(fmtchunktype(chunktype))
2100 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2101 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2101
2102
2102 ui.write('\n')
2103 ui.write('\n')
2103 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2104 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2104 ui.write(('avg chain length : ') + fmt % avgchainlen)
2105 ui.write(('avg chain length : ') + fmt % avgchainlen)
2105 ui.write(('max chain length : ') + fmt % maxchainlen)
2106 ui.write(('max chain length : ') + fmt % maxchainlen)
2106 ui.write(('max chain reach : ') + fmt % maxchainspan)
2107 ui.write(('max chain reach : ') + fmt % maxchainspan)
2107 ui.write(('compression ratio : ') + fmt % compratio)
2108 ui.write(('compression ratio : ') + fmt % compratio)
2108
2109
2109 if format > 0:
2110 if format > 0:
2110 ui.write('\n')
2111 ui.write('\n')
2111 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2112 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2112 % tuple(datasize))
2113 % tuple(datasize))
2113 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2114 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2114 % tuple(fullsize))
2115 % tuple(fullsize))
2115 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2116 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2116 % tuple(deltasize))
2117 % tuple(deltasize))
2117
2118
2118 if numdeltas > 0:
2119 if numdeltas > 0:
2119 ui.write('\n')
2120 ui.write('\n')
2120 fmt = pcfmtstr(numdeltas)
2121 fmt = pcfmtstr(numdeltas)
2121 fmt2 = pcfmtstr(numdeltas, 4)
2122 fmt2 = pcfmtstr(numdeltas, 4)
2122 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2123 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2123 if numprev > 0:
2124 if numprev > 0:
2124 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2125 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2125 numprev))
2126 numprev))
2126 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2127 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2127 numprev))
2128 numprev))
2128 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2129 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2129 numprev))
2130 numprev))
2130 if gdelta:
2131 if gdelta:
2131 ui.write(('deltas against p1 : ')
2132 ui.write(('deltas against p1 : ')
2132 + fmt % pcfmt(nump1, numdeltas))
2133 + fmt % pcfmt(nump1, numdeltas))
2133 ui.write(('deltas against p2 : ')
2134 ui.write(('deltas against p2 : ')
2134 + fmt % pcfmt(nump2, numdeltas))
2135 + fmt % pcfmt(nump2, numdeltas))
2135 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2136 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2136 numdeltas))
2137 numdeltas))
2137
2138
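# Illustrative sketch (hypothetical data model, not the revlog API): the
# chain statistics printed above can be derived from each revision's
# delta parent alone. A revision stored as a full snapshot starts a new
# chain; a delta extends its parent's chain by one.
def _chainlengths_sketch(deltaparents):
    """Compute delta-chain lengths; ``deltaparents[rev]`` is the revision
    the delta is against, or -1 for a full snapshot (like nullrev above).
    """
    chainlengths = []
    for rev, delta in enumerate(deltaparents):
        if delta == -1:
            chainlengths.append(0)
        else:
            chainlengths.append(chainlengths[delta] + 1)
    return chainlengths

# _chainlengths_sketch([-1, 0, 1, -1, 3]) -> [0, 1, 2, 0, 1]
# "avg chain length" is then sum(...) / len(...), "max chain length" is max(...).
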
2138 @command('debugrevspec',
2139 @command('debugrevspec',
2139 [('', 'optimize', None,
2140 [('', 'optimize', None,
2140 _('print parsed tree after optimizing (DEPRECATED)')),
2141 _('print parsed tree after optimizing (DEPRECATED)')),
2141 ('', 'show-revs', True, _('print list of result revisions (default)')),
2142 ('', 'show-revs', True, _('print list of result revisions (default)')),
2142 ('s', 'show-set', None, _('print internal representation of result set')),
2143 ('s', 'show-set', None, _('print internal representation of result set')),
2143 ('p', 'show-stage', [],
2144 ('p', 'show-stage', [],
2144 _('print parsed tree at the given stage'), _('NAME')),
2145 _('print parsed tree at the given stage'), _('NAME')),
2145 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2146 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2146 ('', 'verify-optimized', False, _('verify optimized result')),
2147 ('', 'verify-optimized', False, _('verify optimized result')),
2147 ],
2148 ],
2148 ('REVSPEC'))
2149 ('REVSPEC'))
2149 def debugrevspec(ui, repo, expr, **opts):
2150 def debugrevspec(ui, repo, expr, **opts):
2150 """parse and apply a revision specification
2151 """parse and apply a revision specification
2151
2152
2152 Use the -p/--show-stage option to print the parsed tree at the given stages.
2153 Use the -p/--show-stage option to print the parsed tree at the given stages.
2153 Use -p all to print the tree at every stage.
2154 Use -p all to print the tree at every stage.
2154
2155
2155 Use --no-show-revs option with -s or -p to print only the set
2156 Use --no-show-revs option with -s or -p to print only the set
2156 representation or the parsed tree respectively.
2157 representation or the parsed tree respectively.
2157
2158
2158 Use --verify-optimized to compare the optimized result with the unoptimized
2159 Use --verify-optimized to compare the optimized result with the unoptimized
2159 one. Returns 1 if the optimized result differs.
2160 one. Returns 1 if the optimized result differs.
2160 """
2161 """
2161 opts = pycompat.byteskwargs(opts)
2162 opts = pycompat.byteskwargs(opts)
2162 aliases = ui.configitems('revsetalias')
2163 aliases = ui.configitems('revsetalias')
2163 stages = [
2164 stages = [
2164 ('parsed', lambda tree: tree),
2165 ('parsed', lambda tree: tree),
2165 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2166 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2166 ui.warn)),
2167 ui.warn)),
2167 ('concatenated', revsetlang.foldconcat),
2168 ('concatenated', revsetlang.foldconcat),
2168 ('analyzed', revsetlang.analyze),
2169 ('analyzed', revsetlang.analyze),
2169 ('optimized', revsetlang.optimize),
2170 ('optimized', revsetlang.optimize),
2170 ]
2171 ]
2171 if opts['no_optimized']:
2172 if opts['no_optimized']:
2172 stages = stages[:-1]
2173 stages = stages[:-1]
2173 if opts['verify_optimized'] and opts['no_optimized']:
2174 if opts['verify_optimized'] and opts['no_optimized']:
2174 raise error.Abort(_('cannot use --verify-optimized with '
2175 raise error.Abort(_('cannot use --verify-optimized with '
2175 '--no-optimized'))
2176 '--no-optimized'))
2176 stagenames = set(n for n, f in stages)
2177 stagenames = set(n for n, f in stages)
2177
2178
2178 showalways = set()
2179 showalways = set()
2179 showchanged = set()
2180 showchanged = set()
2180 if ui.verbose and not opts['show_stage']:
2181 if ui.verbose and not opts['show_stage']:
2181 # show parsed tree by --verbose (deprecated)
2182 # show parsed tree by --verbose (deprecated)
2182 showalways.add('parsed')
2183 showalways.add('parsed')
2183 showchanged.update(['expanded', 'concatenated'])
2184 showchanged.update(['expanded', 'concatenated'])
2184 if opts['optimize']:
2185 if opts['optimize']:
2185 showalways.add('optimized')
2186 showalways.add('optimized')
2186 if opts['show_stage'] and opts['optimize']:
2187 if opts['show_stage'] and opts['optimize']:
2187 raise error.Abort(_('cannot use --optimize with --show-stage'))
2188 raise error.Abort(_('cannot use --optimize with --show-stage'))
2188 if opts['show_stage'] == ['all']:
2189 if opts['show_stage'] == ['all']:
2189 showalways.update(stagenames)
2190 showalways.update(stagenames)
2190 else:
2191 else:
2191 for n in opts['show_stage']:
2192 for n in opts['show_stage']:
2192 if n not in stagenames:
2193 if n not in stagenames:
2193 raise error.Abort(_('invalid stage name: %s') % n)
2194 raise error.Abort(_('invalid stage name: %s') % n)
2194 showalways.update(opts['show_stage'])
2195 showalways.update(opts['show_stage'])
2195
2196
2196 treebystage = {}
2197 treebystage = {}
2197 printedtree = None
2198 printedtree = None
2198 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2199 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2199 for n, f in stages:
2200 for n, f in stages:
2200 treebystage[n] = tree = f(tree)
2201 treebystage[n] = tree = f(tree)
2201 if n in showalways or (n in showchanged and tree != printedtree):
2202 if n in showalways or (n in showchanged and tree != printedtree):
2202 if opts['show_stage'] or n != 'parsed':
2203 if opts['show_stage'] or n != 'parsed':
2203 ui.write(("* %s:\n") % n)
2204 ui.write(("* %s:\n") % n)
2204 ui.write(revsetlang.prettyformat(tree), "\n")
2205 ui.write(revsetlang.prettyformat(tree), "\n")
2205 printedtree = tree
2206 printedtree = tree
2206
2207
2207 if opts['verify_optimized']:
2208 if opts['verify_optimized']:
2208 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2209 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2209 brevs = revset.makematcher(treebystage['optimized'])(repo)
2210 brevs = revset.makematcher(treebystage['optimized'])(repo)
2210 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2211 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2211 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2212 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2212 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2213 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2213 arevs = list(arevs)
2214 arevs = list(arevs)
2214 brevs = list(brevs)
2215 brevs = list(brevs)
2215 if arevs == brevs:
2216 if arevs == brevs:
2216 return 0
2217 return 0
2217 ui.write(('--- analyzed\n'), label='diff.file_a')
2218 ui.write(('--- analyzed\n'), label='diff.file_a')
2218 ui.write(('+++ optimized\n'), label='diff.file_b')
2219 ui.write(('+++ optimized\n'), label='diff.file_b')
2219 sm = difflib.SequenceMatcher(None, arevs, brevs)
2220 sm = difflib.SequenceMatcher(None, arevs, brevs)
2220 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2221 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2221 if tag in ('delete', 'replace'):
2222 if tag in ('delete', 'replace'):
2222 for c in arevs[alo:ahi]:
2223 for c in arevs[alo:ahi]:
2223 ui.write('-%s\n' % c, label='diff.deleted')
2224 ui.write('-%s\n' % c, label='diff.deleted')
2224 if tag in ('insert', 'replace'):
2225 if tag in ('insert', 'replace'):
2225 for c in brevs[blo:bhi]:
2226 for c in brevs[blo:bhi]:
2226 ui.write('+%s\n' % c, label='diff.inserted')
2227 ui.write('+%s\n' % c, label='diff.inserted')
2227 if tag == 'equal':
2228 if tag == 'equal':
2228 for c in arevs[alo:ahi]:
2229 for c in arevs[alo:ahi]:
2229 ui.write(' %s\n' % c)
2230 ui.write(' %s\n' % c)
2230 return 1
2231 return 1
2231
2232
2232 func = revset.makematcher(tree)
2233 func = revset.makematcher(tree)
2233 revs = func(repo)
2234 revs = func(repo)
2234 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2235 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2235 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2236 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2236 if not opts['show_revs']:
2237 if not opts['show_revs']:
2237 return
2238 return
2238 for c in revs:
2239 for c in revs:
2239 ui.write("%d\n" % c)
2240 ui.write("%d\n" % c)
2240
2241
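# Illustrative sketch: the --verify-optimized output above is a plain
# opcode walk over difflib.SequenceMatcher. The same pattern renders a
# +/- listing for any two sequences of revision numbers; the function
# name is hypothetical.
def _diffrevlists_sketch(arevs, brevs):
    import difflib
    out = []
    sm = difflib.SequenceMatcher(None, arevs, brevs)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag in ('delete', 'replace'):
            out.extend('-%d' % c for c in arevs[alo:ahi])
        if tag in ('insert', 'replace'):
            out.extend('+%d' % c for c in brevs[blo:bhi])
        if tag == 'equal':
            out.extend(' %d' % c for c in arevs[alo:ahi])
    return out

# _diffrevlists_sketch([0, 1, 2, 4], [0, 2, 3, 4])
# -> [' 0', '-1', ' 2', '+3', ' 4']
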
2241 @command('debugserve', [
2242 @command('debugserve', [
2242 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2243 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2243 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2244 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2244 ('', 'logiofile', '', _('file to log server I/O to')),
2245 ('', 'logiofile', '', _('file to log server I/O to')),
2245 ], '')
2246 ], '')
2246 def debugserve(ui, repo, **opts):
2247 def debugserve(ui, repo, **opts):
2247 """run a server with advanced settings
2248 """run a server with advanced settings
2248
2249
2249 This command is similar to :hg:`serve`. It exists partially as a
2250 This command is similar to :hg:`serve`. It exists partially as a
2250 workaround for the fact that ``hg serve --stdio`` must have specific
2251 workaround for the fact that ``hg serve --stdio`` must have specific
2251 arguments for security reasons.
2252 arguments for security reasons.
2252 """
2253 """
2253 opts = pycompat.byteskwargs(opts)
2254 opts = pycompat.byteskwargs(opts)
2254
2255
2255 if not opts['sshstdio']:
2256 if not opts['sshstdio']:
2256 raise error.Abort(_('only --sshstdio is currently supported'))
2257 raise error.Abort(_('only --sshstdio is currently supported'))
2257
2258
2258 logfh = None
2259 logfh = None
2259
2260
2260 if opts['logiofd'] and opts['logiofile']:
2261 if opts['logiofd'] and opts['logiofile']:
2261 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2262 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2262
2263
2263 if opts['logiofd']:
2264 if opts['logiofd']:
2264 # Line buffered because output is line based.
2265 # Line buffered because output is line based.
2265 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2266 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2266 elif opts['logiofile']:
2267 elif opts['logiofile']:
2267 logfh = open(opts['logiofile'], 'ab', 1)
2268 logfh = open(opts['logiofile'], 'ab', 1)
2268
2269
2269 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2270 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2270 s.serve_forever()
2271 s.serve_forever()
2271
2272
2272 @command('debugsetparents', [], _('REV1 [REV2]'))
2273 @command('debugsetparents', [], _('REV1 [REV2]'))
2273 def debugsetparents(ui, repo, rev1, rev2=None):
2274 def debugsetparents(ui, repo, rev1, rev2=None):
2274 """manually set the parents of the current working directory
2275 """manually set the parents of the current working directory
2275
2276
2276 This is useful for writing repository conversion tools, but should
2277 This is useful for writing repository conversion tools, but should
2277 be used with care. For example, neither the working directory nor the
2278 be used with care. For example, neither the working directory nor the
2278 dirstate is updated, so file status may be incorrect after running this
2279 dirstate is updated, so file status may be incorrect after running this
2279 command.
2280 command.
2280
2281
2281 Returns 0 on success.
2282 Returns 0 on success.
2282 """
2283 """
2283
2284
2284 r1 = scmutil.revsingle(repo, rev1).node()
2285 r1 = scmutil.revsingle(repo, rev1).node()
2285 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2286 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2286
2287
2287 with repo.wlock():
2288 with repo.wlock():
2288 repo.setparents(r1, r2)
2289 repo.setparents(r1, r2)
2289
2290
2290 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2291 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2291 def debugssl(ui, repo, source=None, **opts):
2292 def debugssl(ui, repo, source=None, **opts):
2292 '''test a secure connection to a server
2293 '''test a secure connection to a server
2293
2294
2294 This builds the certificate chain for the server on Windows, installing the
2295 This builds the certificate chain for the server on Windows, installing the
2295 missing intermediates and trusted root via Windows Update if necessary. It
2296 missing intermediates and trusted root via Windows Update if necessary. It
2296 does nothing on other platforms.
2297 does nothing on other platforms.
2297
2298
2298 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2299 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2299 that server is used. See :hg:`help urls` for more information.
2300 that server is used. See :hg:`help urls` for more information.
2300
2301
2301 If the update succeeds, retry the original operation. Otherwise, the cause
2302 If the update succeeds, retry the original operation. Otherwise, the cause
2302 of the SSL error is likely another issue.
2303 of the SSL error is likely another issue.
2303 '''
2304 '''
2304 if not pycompat.iswindows:
2305 if not pycompat.iswindows:
2305 raise error.Abort(_('certificate chain building is only possible on '
2306 raise error.Abort(_('certificate chain building is only possible on '
2306 'Windows'))
2307 'Windows'))
2307
2308
2308 if not source:
2309 if not source:
2309 if not repo:
2310 if not repo:
2310 raise error.Abort(_("there is no Mercurial repository here, and no "
2311 raise error.Abort(_("there is no Mercurial repository here, and no "
2311 "server specified"))
2312 "server specified"))
2312 source = "default"
2313 source = "default"
2313
2314
2314 source, branches = hg.parseurl(ui.expandpath(source))
2315 source, branches = hg.parseurl(ui.expandpath(source))
2315 url = util.url(source)
2316 url = util.url(source)
2316 addr = None
2317 addr = None
2317
2318
2318 defaultport = {'https': 443, 'ssh': 22}
2319 defaultport = {'https': 443, 'ssh': 22}
2319 if url.scheme in defaultport:
2320 if url.scheme in defaultport:
2320 try:
2321 try:
2321 addr = (url.host, int(url.port or defaultport[url.scheme]))
2322 addr = (url.host, int(url.port or defaultport[url.scheme]))
2322 except ValueError:
2323 except ValueError:
2323 raise error.Abort(_("malformed port number in URL"))
2324 raise error.Abort(_("malformed port number in URL"))
2324 else:
2325 else:
2325 raise error.Abort(_("only https and ssh connections are supported"))
2326 raise error.Abort(_("only https and ssh connections are supported"))
2326
2327
2327 from . import win32
2328 from . import win32
2328
2329
2329 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2330 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2330 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2331 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2331
2332
2332 try:
2333 try:
2333 s.connect(addr)
2334 s.connect(addr)
2334 cert = s.getpeercert(True)
2335 cert = s.getpeercert(True)
2335
2336
2336 ui.status(_('checking the certificate chain for %s\n') % url.host)
2337 ui.status(_('checking the certificate chain for %s\n') % url.host)
2337
2338
2338 complete = win32.checkcertificatechain(cert, build=False)
2339 complete = win32.checkcertificatechain(cert, build=False)
2339
2340
2340 if not complete:
2341 if not complete:
2341 ui.status(_('certificate chain is incomplete, updating... '))
2342 ui.status(_('certificate chain is incomplete, updating... '))
2342
2343
2343 if not win32.checkcertificatechain(cert):
2344 if not win32.checkcertificatechain(cert):
2344 ui.status(_('failed.\n'))
2345 ui.status(_('failed.\n'))
2345 else:
2346 else:
2346 ui.status(_('done.\n'))
2347 ui.status(_('done.\n'))
2347 else:
2348 else:
2348 ui.status(_('full certificate chain is available\n'))
2349 ui.status(_('full certificate chain is available\n'))
2349 finally:
2350 finally:
2350 s.close()
2351 s.close()
2351
2352
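# Illustrative, stdlib-only sketch: fetching the server certificate in
# DER form, which is what the Windows chain check above consumes. This
# uses the ssl.SSLContext API instead of the deprecated wrap_socket
# module function and is not the exact code path debugssl takes.
def _fetchpeercert_sketch(host, port=443, timeout=10):
    import socket
    import ssl
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE          # we only want the raw certificate
    sock = socket.create_connection((host, port), timeout)
    try:
        ssock = ctx.wrap_socket(sock, server_hostname=host)
        try:
            return ssock.getpeercert(binary_form=True)   # DER bytes
        finally:
            ssock.close()
    finally:
        sock.close()
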
2352 @command('debugsub',
2353 @command('debugsub',
2353 [('r', 'rev', '',
2354 [('r', 'rev', '',
2354 _('revision to check'), _('REV'))],
2355 _('revision to check'), _('REV'))],
2355 _('[-r REV] [REV]'))
2356 _('[-r REV] [REV]'))
2356 def debugsub(ui, repo, rev=None):
2357 def debugsub(ui, repo, rev=None):
2357 ctx = scmutil.revsingle(repo, rev, None)
2358 ctx = scmutil.revsingle(repo, rev, None)
2358 for k, v in sorted(ctx.substate.items()):
2359 for k, v in sorted(ctx.substate.items()):
2359 ui.write(('path %s\n') % k)
2360 ui.write(('path %s\n') % k)
2360 ui.write((' source %s\n') % v[0])
2361 ui.write((' source %s\n') % v[0])
2361 ui.write((' revision %s\n') % v[1])
2362 ui.write((' revision %s\n') % v[1])
2362
2363
2363 @command('debugsuccessorssets',
2364 @command('debugsuccessorssets',
2364 [('', 'closest', False, _('return closest successors sets only'))],
2365 [('', 'closest', False, _('return closest successors sets only'))],
2365 _('[REV]'))
2366 _('[REV]'))
2366 def debugsuccessorssets(ui, repo, *revs, **opts):
2367 def debugsuccessorssets(ui, repo, *revs, **opts):
2367 """show set of successors for revision
2368 """show set of successors for revision
2368
2369
2369 A successors set of changeset A is a consistent group of revisions that
2370 A successors set of changeset A is a consistent group of revisions that
2370 succeed A. It contains non-obsolete changesets only, unless the closest
2371 succeed A. It contains non-obsolete changesets only, unless the closest
2371 successors sets are requested.
2372 successors sets are requested.
2372
2373
2373 In most cases a changeset A has a single successors set containing a single
2374 In most cases a changeset A has a single successors set containing a single
2374 successor (changeset A replaced by A').
2375 successor (changeset A replaced by A').
2375
2376
2376 A changeset that is made obsolete with no successors is called "pruned".
2377 A changeset that is made obsolete with no successors is called "pruned".
2377 Such changesets have no successors sets at all.
2378 Such changesets have no successors sets at all.
2378
2379
2379 A changeset that has been "split" will have a successors set containing
2380 A changeset that has been "split" will have a successors set containing
2380 more than one successor.
2381 more than one successor.
2381
2382
2382 A changeset that has been rewritten in multiple different ways is called
2383 A changeset that has been rewritten in multiple different ways is called
2383 "divergent". Such changesets have multiple successor sets (each of which
2384 "divergent". Such changesets have multiple successor sets (each of which
2384 may also be split, i.e. have multiple successors).
2385 may also be split, i.e. have multiple successors).
2385
2386
2386 Results are displayed as follows::
2387 Results are displayed as follows::
2387
2388
2388 <rev1>
2389 <rev1>
2389 <successors-1A>
2390 <successors-1A>
2390 <rev2>
2391 <rev2>
2391 <successors-2A>
2392 <successors-2A>
2392 <successors-2B1> <successors-2B2> <successors-2B3>
2393 <successors-2B1> <successors-2B2> <successors-2B3>
2393
2394
2394 Here rev2 has two possible (i.e. divergent) successors sets. The first
2395 Here rev2 has two possible (i.e. divergent) successors sets. The first
2395 holds one element, whereas the second holds three (i.e. the changeset has
2396 holds one element, whereas the second holds three (i.e. the changeset has
2396 been split).
2397 been split).
2397 """
2398 """
2398 # passed to successorssets caching computation from one call to another
2399 # passed to successorssets caching computation from one call to another
2399 cache = {}
2400 cache = {}
2400 ctx2str = bytes
2401 ctx2str = bytes
2401 node2str = short
2402 node2str = short
2402 for rev in scmutil.revrange(repo, revs):
2403 for rev in scmutil.revrange(repo, revs):
2403 ctx = repo[rev]
2404 ctx = repo[rev]
2404 ui.write('%s\n'% ctx2str(ctx))
2405 ui.write('%s\n'% ctx2str(ctx))
2405 for succsset in obsutil.successorssets(repo, ctx.node(),
2406 for succsset in obsutil.successorssets(repo, ctx.node(),
2406 closest=opts[r'closest'],
2407 closest=opts[r'closest'],
2407 cache=cache):
2408 cache=cache):
2408 if succsset:
2409 if succsset:
2409 ui.write(' ')
2410 ui.write(' ')
2410 ui.write(node2str(succsset[0]))
2411 ui.write(node2str(succsset[0]))
2411 for node in succsset[1:]:
2412 for node in succsset[1:]:
2412 ui.write(' ')
2413 ui.write(' ')
2413 ui.write(node2str(node))
2414 ui.write(node2str(node))
2414 ui.write('\n')
2415 ui.write('\n')
2415
2416
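# Illustrative sketch: the indented layout documented in the docstring
# above, fed from a hypothetical mapping of revision -> list of
# successors sets (each set being a list of short hashes). Divergent
# revisions simply contribute one indented line per set.
def _rendersuccessorssets_sketch(successorssets):
    lines = []
    for rev in sorted(successorssets):
        lines.append('%d' % rev)
        for succsset in successorssets[rev]:
            lines.append('    ' + ' '.join(succsset))
    return '\n'.join(lines)

# _rendersuccessorssets_sketch({
#     1: [['a1b2c3d4e5f6']],
#     2: [['012345678901'], ['cafe00000001', 'cafe00000002', 'cafe00000003']],
# })
# produces the <rev> / indented <successors...> shape shown above, with
# rev 2 rendered as two divergent successors sets.
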
2416 @command('debugtemplate',
2417 @command('debugtemplate',
2417 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2418 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2418 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2419 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2419 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2420 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2420 optionalrepo=True)
2421 optionalrepo=True)
2421 def debugtemplate(ui, repo, tmpl, **opts):
2422 def debugtemplate(ui, repo, tmpl, **opts):
2422 """parse and apply a template
2423 """parse and apply a template
2423
2424
2424 If -r/--rev is given, the template is processed as a log template and
2425 If -r/--rev is given, the template is processed as a log template and
2425 applied to the given changesets. Otherwise, it is processed as a generic
2426 applied to the given changesets. Otherwise, it is processed as a generic
2426 template.
2427 template.
2427
2428
2428 Use --verbose to print the parsed tree.
2429 Use --verbose to print the parsed tree.
2429 """
2430 """
2430 revs = None
2431 revs = None
2431 if opts[r'rev']:
2432 if opts[r'rev']:
2432 if repo is None:
2433 if repo is None:
2433 raise error.RepoError(_('there is no Mercurial repository here '
2434 raise error.RepoError(_('there is no Mercurial repository here '
2434 '(.hg not found)'))
2435 '(.hg not found)'))
2435 revs = scmutil.revrange(repo, opts[r'rev'])
2436 revs = scmutil.revrange(repo, opts[r'rev'])
2436
2437
2437 props = {}
2438 props = {}
2438 for d in opts[r'define']:
2439 for d in opts[r'define']:
2439 try:
2440 try:
2440 k, v = (e.strip() for e in d.split('=', 1))
2441 k, v = (e.strip() for e in d.split('=', 1))
2441 if not k or k == 'ui':
2442 if not k or k == 'ui':
2442 raise ValueError
2443 raise ValueError
2443 props[k] = v
2444 props[k] = v
2444 except ValueError:
2445 except ValueError:
2445 raise error.Abort(_('malformed keyword definition: %s') % d)
2446 raise error.Abort(_('malformed keyword definition: %s') % d)
2446
2447
2447 if ui.verbose:
2448 if ui.verbose:
2448 aliases = ui.configitems('templatealias')
2449 aliases = ui.configitems('templatealias')
2449 tree = templater.parse(tmpl)
2450 tree = templater.parse(tmpl)
2450 ui.note(templater.prettyformat(tree), '\n')
2451 ui.note(templater.prettyformat(tree), '\n')
2451 newtree = templater.expandaliases(tree, aliases)
2452 newtree = templater.expandaliases(tree, aliases)
2452 if newtree != tree:
2453 if newtree != tree:
2453 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2454 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2454
2455
2455 if revs is None:
2456 if revs is None:
2456 tres = formatter.templateresources(ui, repo)
2457 tres = formatter.templateresources(ui, repo)
2457 t = formatter.maketemplater(ui, tmpl, resources=tres)
2458 t = formatter.maketemplater(ui, tmpl, resources=tres)
2458 ui.write(t.renderdefault(props))
2459 ui.write(t.renderdefault(props))
2459 else:
2460 else:
2460 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2461 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2461 for r in revs:
2462 for r in revs:
2462 displayer.show(repo[r], **pycompat.strkwargs(props))
2463 displayer.show(repo[r], **pycompat.strkwargs(props))
2463 displayer.close()
2464 displayer.close()
2464
2465
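# Illustrative sketch: the -D/--define parsing above, reduced to a pure
# function with a hypothetical name. Each definition must look like
# KEY=VALUE, the key may not be empty, and 'ui' is rejected, mirroring
# the checks in the command.
def _parsedefines_sketch(defines):
    props = {}
    for d in defines:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise ValueError('malformed keyword definition: %s' % d)
    return props

# _parsedefines_sketch(['who=world', 'greeting = hello'])
# -> {'who': 'world', 'greeting': 'hello'}
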
2465 @command('debuguigetpass', [
2466 @command('debuguigetpass', [
2466 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2467 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2467 ], _('[-p TEXT]'), norepo=True)
2468 ], _('[-p TEXT]'), norepo=True)
2468 def debuguigetpass(ui, prompt=''):
2469 def debuguigetpass(ui, prompt=''):
2469 """show prompt to type password"""
2470 """show prompt to type password"""
2470 r = ui.getpass(prompt)
2471 r = ui.getpass(prompt)
2471 ui.write(('response: %s\n') % r)
2472 ui.write(('response: %s\n') % r)
2472
2473
2473 @command('debuguiprompt', [
2474 @command('debuguiprompt', [
2474 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2475 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2475 ], _('[-p TEXT]'), norepo=True)
2476 ], _('[-p TEXT]'), norepo=True)
2476 def debuguiprompt(ui, prompt=''):
2477 def debuguiprompt(ui, prompt=''):
2477 """show plain prompt"""
2478 """show plain prompt"""
2478 r = ui.prompt(prompt)
2479 r = ui.prompt(prompt)
2479 ui.write(('response: %s\n') % r)
2480 ui.write(('response: %s\n') % r)
2480
2481
2481 @command('debugupdatecaches', [])
2482 @command('debugupdatecaches', [])
2482 def debugupdatecaches(ui, repo, *pats, **opts):
2483 def debugupdatecaches(ui, repo, *pats, **opts):
2483 """warm all known caches in the repository"""
2484 """warm all known caches in the repository"""
2484 with repo.wlock(), repo.lock():
2485 with repo.wlock(), repo.lock():
2485 repo.updatecaches(full=True)
2486 repo.updatecaches(full=True)
2486
2487
2487 @command('debugupgraderepo', [
2488 @command('debugupgraderepo', [
2488 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2489 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2489 ('', 'run', False, _('performs an upgrade')),
2490 ('', 'run', False, _('performs an upgrade')),
2490 ])
2491 ])
2491 def debugupgraderepo(ui, repo, run=False, optimize=None):
2492 def debugupgraderepo(ui, repo, run=False, optimize=None):
2492 """upgrade a repository to use different features
2493 """upgrade a repository to use different features
2493
2494
2494 If no arguments are specified, the repository is evaluated for upgrade
2495 If no arguments are specified, the repository is evaluated for upgrade
2495 and a list of problems and potential optimizations is printed.
2496 and a list of problems and potential optimizations is printed.
2496
2497
2497 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2498 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2498 can be influenced via additional arguments. More details will be provided
2499 can be influenced via additional arguments. More details will be provided
2499 by the command output when run without ``--run``.
2500 by the command output when run without ``--run``.
2500
2501
2501 During the upgrade, the repository will be locked and no writes will be
2502 During the upgrade, the repository will be locked and no writes will be
2502 allowed.
2503 allowed.
2503
2504
2504 At the end of the upgrade, the repository may not be readable while new
2505 At the end of the upgrade, the repository may not be readable while new
2505 repository data is swapped in. This window will be as long as it takes to
2506 repository data is swapped in. This window will be as long as it takes to
2506 rename some directories inside the ``.hg`` directory. On most machines, this
2507 rename some directories inside the ``.hg`` directory. On most machines, this
2507 should complete almost instantaneously and the chances of a consumer being
2508 should complete almost instantaneously and the chances of a consumer being
2508 unable to access the repository should be low.
2509 unable to access the repository should be low.
2509 """
2510 """
2510 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2511 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2511
2512
2512 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2513 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2513 inferrepo=True)
2514 inferrepo=True)
2514 def debugwalk(ui, repo, *pats, **opts):
2515 def debugwalk(ui, repo, *pats, **opts):
2515 """show how files match on given patterns"""
2516 """show how files match on given patterns"""
2516 opts = pycompat.byteskwargs(opts)
2517 opts = pycompat.byteskwargs(opts)
2517 m = scmutil.match(repo[None], pats, opts)
2518 m = scmutil.match(repo[None], pats, opts)
2518 ui.write(('matcher: %r\n' % m))
2519 ui.write(('matcher: %r\n' % m))
2519 items = list(repo[None].walk(m))
2520 items = list(repo[None].walk(m))
2520 if not items:
2521 if not items:
2521 return
2522 return
2522 f = lambda fn: fn
2523 f = lambda fn: fn
2523 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2524 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2524 f = lambda fn: util.normpath(fn)
2525 f = lambda fn: util.normpath(fn)
2525 fmt = 'f %%-%ds %%-%ds %%s' % (
2526 fmt = 'f %%-%ds %%-%ds %%s' % (
2526 max([len(abs) for abs in items]),
2527 max([len(abs) for abs in items]),
2527 max([len(m.rel(abs)) for abs in items]))
2528 max([len(m.rel(abs)) for abs in items]))
2528 for abs in items:
2529 for abs in items:
2529 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2530 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2530 ui.write("%s\n" % line.rstrip())
2531 ui.write("%s\n" % line.rstrip())
2531
2532
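# Illustrative sketch: the aligned output of debugwalk above comes from
# building a format string whose field widths are the longest absolute
# and relative paths. The same trick works for any list of
# (abs, rel, note) rows; the function name is hypothetical.
def _alignrows_sketch(rows):
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(a) for a, r, n in rows),
        max(len(r) for a, r, n in rows))
    return [(fmt % row).rstrip() for row in rows]

# _alignrows_sketch([('a/b.txt', 'b.txt', 'exact'),
#                    ('a/long/name.txt', 'long/name.txt', '')])
# returns two lines whose columns are padded to the widest entry, with
# trailing whitespace stripped just like the loop above.
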
2532 @command('debugwhyunstable', [], _('REV'))
2533 @command('debugwhyunstable', [], _('REV'))
2533 def debugwhyunstable(ui, repo, rev):
2534 def debugwhyunstable(ui, repo, rev):
2534 """explain instabilities of a changeset"""
2535 """explain instabilities of a changeset"""
2535 for entry in obsutil.whyunstable(repo, repo[rev]):
2536 for entry in obsutil.whyunstable(repo, repo[rev]):
2536 dnodes = ''
2537 dnodes = ''
2537 if entry.get('divergentnodes'):
2538 if entry.get('divergentnodes'):
2538 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2539 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2539 for ctx in entry['divergentnodes']) + ' '
2540 for ctx in entry['divergentnodes']) + ' '
2540 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2541 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2541 entry['reason'], entry['node']))
2542 entry['reason'], entry['node']))
2542
2543
2543 @command('debugwireargs',
2544 @command('debugwireargs',
2544 [('', 'three', '', 'three'),
2545 [('', 'three', '', 'three'),
2545 ('', 'four', '', 'four'),
2546 ('', 'four', '', 'four'),
2546 ('', 'five', '', 'five'),
2547 ('', 'five', '', 'five'),
2547 ] + cmdutil.remoteopts,
2548 ] + cmdutil.remoteopts,
2548 _('REPO [OPTIONS]... [ONE [TWO]]'),
2549 _('REPO [OPTIONS]... [ONE [TWO]]'),
2549 norepo=True)
2550 norepo=True)
2550 def debugwireargs(ui, repopath, *vals, **opts):
2551 def debugwireargs(ui, repopath, *vals, **opts):
2551 opts = pycompat.byteskwargs(opts)
2552 opts = pycompat.byteskwargs(opts)
2552 repo = hg.peer(ui, opts, repopath)
2553 repo = hg.peer(ui, opts, repopath)
2553 for opt in cmdutil.remoteopts:
2554 for opt in cmdutil.remoteopts:
2554 del opts[opt[1]]
2555 del opts[opt[1]]
2555 args = {}
2556 args = {}
2556 for k, v in opts.iteritems():
2557 for k, v in opts.iteritems():
2557 if v:
2558 if v:
2558 args[k] = v
2559 args[k] = v
2559 args = pycompat.strkwargs(args)
2560 args = pycompat.strkwargs(args)
2560 # run twice to check that we don't mess up the stream for the next command
2561 # run twice to check that we don't mess up the stream for the next command
2561 res1 = repo.debugwireargs(*vals, **args)
2562 res1 = repo.debugwireargs(*vals, **args)
2562 res2 = repo.debugwireargs(*vals, **args)
2563 res2 = repo.debugwireargs(*vals, **args)
2563 ui.write("%s\n" % res1)
2564 ui.write("%s\n" % res1)
2564 if res1 != res2:
2565 if res1 != res2:
2565 ui.warn("%s\n" % res2)
2566 ui.warn("%s\n" % res2)
2566
2567
2567 def _parsewirelangblocks(fh):
2568 def _parsewirelangblocks(fh):
2568 activeaction = None
2569 activeaction = None
2569 blocklines = []
2570 blocklines = []
2570
2571
2571 for line in fh:
2572 for line in fh:
2572 line = line.rstrip()
2573 line = line.rstrip()
2573 if not line:
2574 if not line:
2574 continue
2575 continue
2575
2576
2576 if line.startswith(b'#'):
2577 if line.startswith(b'#'):
2577 continue
2578 continue
2578
2579
2579 if not line.startswith(' '):
2580 if not line.startswith(' '):
2580 # New block. Flush previous one.
2581 # New block. Flush previous one.
2581 if activeaction:
2582 if activeaction:
2582 yield activeaction, blocklines
2583 yield activeaction, blocklines
2583
2584
2584 activeaction = line
2585 activeaction = line
2585 blocklines = []
2586 blocklines = []
2586 continue
2587 continue
2587
2588
2588 # Else we start with an indent.
2589 # Else we start with an indent.
2589
2590
2590 if not activeaction:
2591 if not activeaction:
2591 raise error.Abort(_('indented line outside of block'))
2592 raise error.Abort(_('indented line outside of block'))
2592
2593
2593 blocklines.append(line)
2594 blocklines.append(line)
2594
2595
2595 # Flush last block.
2596 # Flush last block.
2596 if activeaction:
2597 if activeaction:
2597 yield activeaction, blocklines
2598 yield activeaction, blocklines
2598
2599
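The helper above groups the stdin script into (action, payload) blocks: a line with no leading whitespace starts a new action, and the indented lines that follow form its payload. Below is a minimal, standalone sketch (not part of this patch; a simplified stand-in that uses bytes consistently and a plain exception instead of error.Abort) showing the same grouping behaviour and the pairs it yields:

    def parseblocks(lines):
        # Simplified stand-in for _parsewirelangblocks(): group a
        # wire-language script into (action, payload lines) pairs.
        activeaction = None
        blocklines = []
        for line in lines:
            line = line.rstrip()
            if not line or line.startswith(b'#'):
                continue
            if not line.startswith(b' '):
                # A non-indented line starts a new block; flush the old one.
                if activeaction:
                    yield activeaction, blocklines
                activeaction = line
                blocklines = []
                continue
            if not activeaction:
                raise ValueError('indented line outside of block')
            blocklines.append(line)
        # Flush the final block.
        if activeaction:
            yield activeaction, blocklines

    script = [
        b'command listkeys',
        b'    namespace bookmarks',
        b'readavailable',
    ]
    for action, payload in parseblocks(script):
        print(action, payload)
    # b'command listkeys' [b'    namespace bookmarks']
    # b'readavailable' []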
2599 @command('debugwireproto',
2600 @command('debugwireproto',
2600 [
2601 [
2601 ('', 'localssh', False, _('start an SSH server for this repo')),
2602 ('', 'localssh', False, _('start an SSH server for this repo')),
2602 ('', 'peer', '', _('construct a specific version of the peer')),
2603 ('', 'peer', '', _('construct a specific version of the peer')),
2603 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2604 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2604 ] + cmdutil.remoteopts,
2605 ] + cmdutil.remoteopts,
2605 _('[REPO]'),
2606 _('[PATH]'),
2606 optionalrepo=True)
2607 optionalrepo=True)
2607 def debugwireproto(ui, repo, **opts):
2608 def debugwireproto(ui, repo, path=None, **opts):
2608 """send wire protocol commands to a server
2609 """send wire protocol commands to a server
2609
2610
2610 This command can be used to issue wire protocol commands to remote
2611 This command can be used to issue wire protocol commands to remote
2611 peers and to debug the raw data being exchanged.
2612 peers and to debug the raw data being exchanged.
2612
2613
2613 ``--localssh`` will start an SSH server against the current repository
2614 ``--localssh`` will start an SSH server against the current repository
2614 and connect to that. By default, the connection will perform a handshake
2615 and connect to that. By default, the connection will perform a handshake
2615 and establish an appropriate peer instance.
2616 and establish an appropriate peer instance.
2616
2617
2617 ``--peer`` can be used to bypass the handshake protocol and construct a
2618 ``--peer`` can be used to bypass the handshake protocol and construct a
2618 peer instance using the specified class type. Valid values are ``raw``,
2619 peer instance using the specified class type. Valid values are ``raw``,
2619 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2620 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2620 payloads and don't support higher-level command actions.
2621 payloads and don't support higher-level command actions.
2621
2622
2622 ``--noreadstderr`` can be used to disable automatic reading from stderr
2623 ``--noreadstderr`` can be used to disable automatic reading from stderr
2623 of the peer (for SSH connections only). Disabling automatic reading of
2624 of the peer (for SSH connections only). Disabling automatic reading of
2624 stderr is useful for making output more deterministic.
2625 stderr is useful for making output more deterministic.
2625
2626
2626 Commands are issued through a mini-language supplied on stdin.
2627 Commands are issued through a mini-language supplied on stdin.
2627 The language consists of individual actions to perform. An action is
2628 The language consists of individual actions to perform. An action is
2628 defined by a block. A block is defined as a line with no leading
2629 defined by a block. A block is defined as a line with no leading
2629 space followed by 0 or more lines with leading space. Blocks are
2630 space followed by 0 or more lines with leading space. Blocks are
2630 effectively a high-level command with additional metadata.
2631 effectively a high-level command with additional metadata.
2631
2632
2632 Lines beginning with ``#`` are ignored.
2633 Lines beginning with ``#`` are ignored.
2633
2634
2634 The following sections denote available actions.
2635 The following sections denote available actions.
2635
2636
2636 raw
2637 raw
2637 ---
2638 ---
2638
2639
2639 Send raw data to the server.
2640 Send raw data to the server.
2640
2641
2641 The block payload contains the raw data to send as one atomic send
2642 The block payload contains the raw data to send as one atomic send
2642 operation. The data may not actually be delivered in a single system
2643 operation. The data may not actually be delivered in a single system
2643 call: it depends on the abilities of the transport being used.
2644 call: it depends on the abilities of the transport being used.
2644
2645
2645 Each line in the block is de-indented and concatenated. Then, that
2646 Each line in the block is de-indented and concatenated. Then, that
2646 value is evaluated as a Python b'' literal. This allows the use of
2647 value is evaluated as a Python b'' literal. This allows the use of
2647 backslash escaping, etc.
2648 backslash escaping, etc.
2648
2649
2649 raw+
2650 raw+
2650 ----
2651 ----
2651
2652
2652 Behaves like ``raw``, except that output is flushed afterwards.
2653 Behaves like ``raw``, except that output is flushed afterwards.
2653
2654
2654 command <X>
2655 command <X>
2655 -----------
2656 -----------
2656
2657
2657 Send a request to run a named command, whose name follows the ``command``
2658 Send a request to run a named command, whose name follows the ``command``
2658 string.
2659 string.
2659
2660
2660 Arguments to the command are defined as lines in this block. The format of
2661 Arguments to the command are defined as lines in this block. The format of
2661 each line is ``<key> <value>``. e.g.::
2662 each line is ``<key> <value>``. e.g.::
2662
2663
2663 command listkeys
2664 command listkeys
2664 namespace bookmarks
2665 namespace bookmarks
2665
2666
2666 Values are interpreted as Python b'' literals. This allows encoding
2667 Values are interpreted as Python b'' literals. This allows encoding
2667 special byte sequences via backslash escaping.
2668 special byte sequences via backslash escaping.
2668
2669
2669 The following arguments have special meaning:
2670 The following arguments have special meaning:
2670
2671
2671 ``PUSHFILE``
2672 ``PUSHFILE``
2672 When defined, the *push* mechanism of the peer will be used instead
2673 When defined, the *push* mechanism of the peer will be used instead
2673 of the static request-response mechanism and the content of the
2674 of the static request-response mechanism and the content of the
2674 file specified in the value of this argument will be sent as the
2675 file specified in the value of this argument will be sent as the
2675 command payload.
2676 command payload.
2676
2677
2677 This can be used to submit a local bundle file to the remote.
2678 This can be used to submit a local bundle file to the remote.
2678
2679
2679 batchbegin
2680 batchbegin
2680 ----------
2681 ----------
2681
2682
2682 Instruct the peer to begin a batched send.
2683 Instruct the peer to begin a batched send.
2683
2684
2684 All ``command`` blocks are queued for execution until the next
2685 All ``command`` blocks are queued for execution until the next
2685 ``batchsubmit`` block.
2686 ``batchsubmit`` block.
2686
2687
2687 batchsubmit
2688 batchsubmit
2688 -----------
2689 -----------
2689
2690
2690 Submit previously queued ``command`` blocks as a batch request.
2691 Submit previously queued ``command`` blocks as a batch request.
2691
2692
2692 This action MUST be paired with a ``batchbegin`` action.
2693 This action MUST be paired with a ``batchbegin`` action.
2693
2694
2694 close
2695 close
2695 -----
2696 -----
2696
2697
2697 Close the connection to the server.
2698 Close the connection to the server.
2698
2699
2699 flush
2700 flush
2700 -----
2701 -----
2701
2702
2702 Flush data written to the server.
2703 Flush data written to the server.
2703
2704
2704 readavailable
2705 readavailable
2705 -------------
2706 -------------
2706
2707
2707 Close the write end of the connection and read all available data from
2708 Close the write end of the connection and read all available data from
2708 the server.
2709 the server.
2709
2710
2710 If the connection to the server encompasses multiple pipes, we poll both
2711 If the connection to the server encompasses multiple pipes, we poll both
2711 pipes and read available data.
2712 pipes and read available data.
2712
2713
2713 readline
2714 readline
2714 --------
2715 --------
2715
2716
2716 Read a line of output from the server. If there are multiple output
2717 Read a line of output from the server. If there are multiple output
2717 pipes, reads only the main pipe.
2718 pipes, reads only the main pipe.
2718
2719
2719 ereadline
2720 ereadline
2720 ---------
2721 ---------
2721
2722
2722 Like ``readline``, but read from the stderr pipe, if available.
2723 Like ``readline``, but read from the stderr pipe, if available.
2723
2724
2724 read <X>
2725 read <X>
2725 --------
2726 --------
2726
2727
2727 ``read()`` N bytes from the server's main output pipe.
2728 ``read()`` N bytes from the server's main output pipe.
2728
2729
2729 eread <X>
2730 eread <X>
2730 ---------
2731 ---------
2731
2732
2732 ``read()`` N bytes from the server's stderr pipe, if available.
2733 ``read()`` N bytes from the server's stderr pipe, if available.
2733 """
2734 """
2734 opts = pycompat.byteskwargs(opts)
2735 opts = pycompat.byteskwargs(opts)
2735
2736
2736 if opts['localssh'] and not repo:
2737 if opts['localssh'] and not repo:
2737 raise error.Abort(_('--localssh requires a repository'))
2738 raise error.Abort(_('--localssh requires a repository'))
2738
2739
2739 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2740 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2740 raise error.Abort(_('invalid value for --peer'),
2741 raise error.Abort(_('invalid value for --peer'),
2741 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2742 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2742
2743
2744 if path and opts['localssh']:
2745 raise error.Abort(_('cannot specify --localssh with an explicit '
2746 'path'))
2747
2743 if ui.interactive():
2748 if ui.interactive():
2744 ui.write(_('(waiting for commands on stdin)\n'))
2749 ui.write(_('(waiting for commands on stdin)\n'))
2745
2750
2746 blocks = list(_parsewirelangblocks(ui.fin))
2751 blocks = list(_parsewirelangblocks(ui.fin))
2747
2752
2748 proc = None
2753 proc = None
2754 stdin = None
2755 stdout = None
2756 stderr = None
2749
2757
2750 if opts['localssh']:
2758 if opts['localssh']:
2751 # We start the SSH server in its own process so there is process
2759 # We start the SSH server in its own process so there is process
2752 # separation. This prevents a whole class of potential bugs around
2760 # separation. This prevents a whole class of potential bugs around
2753 # shared state from interfering with server operation.
2761 # shared state from interfering with server operation.
2754 args = util.hgcmd() + [
2762 args = util.hgcmd() + [
2755 '-R', repo.root,
2763 '-R', repo.root,
2756 'debugserve', '--sshstdio',
2764 'debugserve', '--sshstdio',
2757 ]
2765 ]
2758 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2766 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2759 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2767 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2760 bufsize=0)
2768 bufsize=0)
2761
2769
2762 stdin = proc.stdin
2770 stdin = proc.stdin
2763 stdout = proc.stdout
2771 stdout = proc.stdout
2764 stderr = proc.stderr
2772 stderr = proc.stderr
2765
2773
2766 # We turn the pipes into observers so we can log I/O.
2774 # We turn the pipes into observers so we can log I/O.
2767 if ui.verbose or opts['peer'] == 'raw':
2775 if ui.verbose or opts['peer'] == 'raw':
2768 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2776 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2769 logdata=True)
2777 logdata=True)
2770 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2778 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2771 logdata=True)
2779 logdata=True)
2772 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2780 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2773 logdata=True)
2781 logdata=True)
2774
2782
2775 # --localssh also implies the peer connection settings.
2783 # --localssh also implies the peer connection settings.
2776
2784
2777 url = 'ssh://localserver'
2785 url = 'ssh://localserver'
2778 autoreadstderr = not opts['noreadstderr']
2786 autoreadstderr = not opts['noreadstderr']
2779
2787
2780 if opts['peer'] == 'ssh1':
2788 if opts['peer'] == 'ssh1':
2781 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2789 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2782 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2790 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2783 None, autoreadstderr=autoreadstderr)
2791 None, autoreadstderr=autoreadstderr)
2784 elif opts['peer'] == 'ssh2':
2792 elif opts['peer'] == 'ssh2':
2785 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2793 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2786 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2794 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2787 None, autoreadstderr=autoreadstderr)
2795 None, autoreadstderr=autoreadstderr)
2788 elif opts['peer'] == 'raw':
2796 elif opts['peer'] == 'raw':
2789 ui.write(_('using raw connection to peer\n'))
2797 ui.write(_('using raw connection to peer\n'))
2790 peer = None
2798 peer = None
2791 else:
2799 else:
2792 ui.write(_('creating ssh peer from handshake results\n'))
2800 ui.write(_('creating ssh peer from handshake results\n'))
2793 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2801 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2794 autoreadstderr=autoreadstderr)
2802 autoreadstderr=autoreadstderr)
2795
2803
2804 elif path:
2805 # We bypass hg.peer() so we can proxy the sockets.
2806 # TODO consider not doing this because we skip
2807 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2808 u = util.url(path)
2809 if u.scheme != 'http':
2810 raise error.Abort(_('only http:// paths are currently supported'))
2811
2812 url, authinfo = u.authinfo()
2813 openerargs = {}
2814
2815 # Turn pipes/sockets into observers so we can log I/O.
2816 if ui.verbose:
2817 openerargs = {
2818 r'loggingfh': ui,
2819 r'loggingname': b's',
2820 r'loggingopts': {
2821 r'logdata': True,
2822 },
2823 }
2824
2825 opener = urlmod.opener(ui, authinfo, **openerargs)
2826
2827 if opts['peer'] == 'raw':
2828 ui.write(_('using raw connection to peer\n'))
2829 peer = None
2830 elif opts['peer']:
2831 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2832 opts['peer'])
2833 else:
2834 peer = httppeer.httppeer(ui, path, url, opener)
2835 peer._fetchcaps()
2836
2837 # We /could/ populate stdin/stdout with sock.makefile()...
2796 else:
2838 else:
2797 raise error.Abort(_('only --localssh is currently supported'))
2839 raise error.Abort(_('unsupported connection configuration'))
2798
2840
2799 batchedcommands = None
2841 batchedcommands = None
2800
2842
2801 # Now perform actions based on the parsed wire language instructions.
2843 # Now perform actions based on the parsed wire language instructions.
2802 for action, lines in blocks:
2844 for action, lines in blocks:
2803 if action in ('raw', 'raw+'):
2845 if action in ('raw', 'raw+'):
2846 if not stdin:
2847 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2848
2804 # Concatenate the data together.
2849 # Concatenate the data together.
2805 data = ''.join(l.lstrip() for l in lines)
2850 data = ''.join(l.lstrip() for l in lines)
2806 data = util.unescapestr(data)
2851 data = util.unescapestr(data)
2807 stdin.write(data)
2852 stdin.write(data)
2808
2853
2809 if action == 'raw+':
2854 if action == 'raw+':
2810 stdin.flush()
2855 stdin.flush()
2811 elif action == 'flush':
2856 elif action == 'flush':
2857 if not stdin:
2858 raise error.Abort(_('cannot call flush on this peer'))
2812 stdin.flush()
2859 stdin.flush()
2813 elif action.startswith('command'):
2860 elif action.startswith('command'):
2814 if not peer:
2861 if not peer:
2815 raise error.Abort(_('cannot send commands unless peer instance '
2862 raise error.Abort(_('cannot send commands unless peer instance '
2816 'is available'))
2863 'is available'))
2817
2864
2818 command = action.split(' ', 1)[1]
2865 command = action.split(' ', 1)[1]
2819
2866
2820 args = {}
2867 args = {}
2821 for line in lines:
2868 for line in lines:
2822 # We need to allow empty values.
2869 # We need to allow empty values.
2823 fields = line.lstrip().split(' ', 1)
2870 fields = line.lstrip().split(' ', 1)
2824 if len(fields) == 1:
2871 if len(fields) == 1:
2825 key = fields[0]
2872 key = fields[0]
2826 value = ''
2873 value = ''
2827 else:
2874 else:
2828 key, value = fields
2875 key, value = fields
2829
2876
2830 args[key] = util.unescapestr(value)
2877 args[key] = util.unescapestr(value)
2831
2878
2832 if batchedcommands is not None:
2879 if batchedcommands is not None:
2833 batchedcommands.append((command, args))
2880 batchedcommands.append((command, args))
2834 continue
2881 continue
2835
2882
2836 ui.status(_('sending %s command\n') % command)
2883 ui.status(_('sending %s command\n') % command)
2837
2884
2838 if 'PUSHFILE' in args:
2885 if 'PUSHFILE' in args:
2839 with open(args['PUSHFILE'], r'rb') as fh:
2886 with open(args['PUSHFILE'], r'rb') as fh:
2840 del args['PUSHFILE']
2887 del args['PUSHFILE']
2841 res, output = peer._callpush(command, fh,
2888 res, output = peer._callpush(command, fh,
2842 **pycompat.strkwargs(args))
2889 **pycompat.strkwargs(args))
2843 ui.status(_('result: %s\n') % util.escapedata(res))
2890 ui.status(_('result: %s\n') % util.escapedata(res))
2844 ui.status(_('remote output: %s\n') %
2891 ui.status(_('remote output: %s\n') %
2845 util.escapedata(output))
2892 util.escapedata(output))
2846 else:
2893 else:
2847 res = peer._call(command, **pycompat.strkwargs(args))
2894 res = peer._call(command, **pycompat.strkwargs(args))
2848 ui.status(_('response: %s\n') % util.escapedata(res))
2895 ui.status(_('response: %s\n') % util.escapedata(res))
2849
2896
2850 elif action == 'batchbegin':
2897 elif action == 'batchbegin':
2851 if batchedcommands is not None:
2898 if batchedcommands is not None:
2852 raise error.Abort(_('nested batchbegin not allowed'))
2899 raise error.Abort(_('nested batchbegin not allowed'))
2853
2900
2854 batchedcommands = []
2901 batchedcommands = []
2855 elif action == 'batchsubmit':
2902 elif action == 'batchsubmit':
2856 # There is a batching API we could go through. But it would be
2903 # There is a batching API we could go through. But it would be
2857 # difficult to normalize requests into function calls. It is easier
2904 # difficult to normalize requests into function calls. It is easier
2858 # to bypass this layer and normalize to commands + args.
2905 # to bypass this layer and normalize to commands + args.
2859 ui.status(_('sending batch with %d sub-commands\n') %
2906 ui.status(_('sending batch with %d sub-commands\n') %
2860 len(batchedcommands))
2907 len(batchedcommands))
2861 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2908 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2862 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2909 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2863
2910
2864 batchedcommands = None
2911 batchedcommands = None
2865 elif action == 'close':
2912 elif action == 'close':
2866 peer.close()
2913 peer.close()
2867 elif action == 'readavailable':
2914 elif action == 'readavailable':
2915 if not stdout or not stderr:
2916 raise error.Abort(_('readavailable not available on this peer'))
2917
2868 stdin.close()
2918 stdin.close()
2869 stdout.read()
2919 stdout.read()
2870 stderr.read()
2920 stderr.read()
2921
2871 elif action == 'readline':
2922 elif action == 'readline':
2923 if not stdout:
2924 raise error.Abort(_('readline not available on this peer'))
2872 stdout.readline()
2925 stdout.readline()
2873 elif action == 'ereadline':
2926 elif action == 'ereadline':
2927 if not stderr:
2928 raise error.Abort(_('ereadline not available on this peer'))
2874 stderr.readline()
2929 stderr.readline()
2875 elif action.startswith('read '):
2930 elif action.startswith('read '):
2876 count = int(action.split(' ', 1)[1])
2931 count = int(action.split(' ', 1)[1])
2932 if not stdout:
2933 raise error.Abort(_('read not available on this peer'))
2877 stdout.read(count)
2934 stdout.read(count)
2878 elif action.startswith('eread '):
2935 elif action.startswith('eread '):
2879 count = int(action.split(' ', 1)[1])
2936 count = int(action.split(' ', 1)[1])
2937 if not stderr:
2938 raise error.Abort(_('eread not available on this peer'))
2880 stderr.read(count)
2939 stderr.read(count)
2881 else:
2940 else:
2882 raise error.Abort(_('unknown action: %s') % action)
2941 raise error.Abort(_('unknown action: %s') % action)
2883
2942
2884 if batchedcommands is not None:
2943 if batchedcommands is not None:
2885 raise error.Abort(_('unclosed "batchbegin" request'))
2944 raise error.Abort(_('unclosed "batchbegin" request'))
2886
2945
2887 if peer:
2946 if peer:
2888 peer.close()
2947 peer.close()
2889
2948
2890 if proc:
2949 if proc:
2891 proc.kill()
2950 proc.kill()
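For reference, the documented ``batchbegin``/``batchsubmit`` actions can be combined into a single stdin script. The following is a hypothetical sketch (the output file name and the exact invocation are illustrative assumptions, not part of this patch) that composes such a script in Python, using only actions described in the docstring above:

    # Hypothetical illustration: compose a wire-language script that batches
    # two listkeys commands as one request, then closes the connection.
    script = b'\n'.join([
        b'batchbegin',
        b'command listkeys',
        b'    namespace namespaces',
        b'command listkeys',
        b'    namespace bookmarks',
        b'batchsubmit',
        b'close',
        b'',  # trailing newline
    ])

    with open('wireproto-script.txt', 'wb') as fh:
        fh.write(script)

    # The script could then be piped to the command from a shell, e.g.
    #   hg debugwireproto --localssh < wireproto-script.txt
    # when run inside a repository (an assumption; an http:// path would also
    # work, subject to the peer restrictions implemented above).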
@@ -1,163 +1,229 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [web]
2 > [web]
3 > push_ssl = false
3 > push_ssl = false
4 > allow_push = *
4 > allow_push = *
5 > EOF
5 > EOF
6
6
7 $ hg init server
7 $ hg init server
8 $ cd server
8 $ cd server
9 $ touch a
9 $ touch a
10 $ hg -q commit -A -m initial
10 $ hg -q commit -A -m initial
11 $ cd ..
11 $ cd ..
12
12
13 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
13 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
14 $ cat hg.pid >> $DAEMON_PIDS
14 $ cat hg.pid >> $DAEMON_PIDS
15
15
16 compression formats are advertised in compression capability
16 compression formats are advertised in compression capability
17
17
18 #if zstd
18 #if zstd
19 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zstd,zlib$' > /dev/null
19 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zstd,zlib$' > /dev/null
20 #else
20 #else
21 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zlib$' > /dev/null
21 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zlib$' > /dev/null
22 #endif
22 #endif
23
23
24 $ killdaemons.py
24 $ killdaemons.py
25
25
26 server.compressionengines can replace engines list wholesale
26 server.compressionengines can replace engines list wholesale
27
27
28 $ hg serve --config server.compressionengines=none -R server -p $HGPORT -d --pid-file hg.pid
28 $ hg serve --config server.compressionengines=none -R server -p $HGPORT -d --pid-file hg.pid
29 $ cat hg.pid > $DAEMON_PIDS
29 $ cat hg.pid > $DAEMON_PIDS
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none$' > /dev/null
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none$' > /dev/null
31
31
32 $ killdaemons.py
32 $ killdaemons.py
33
33
34 Order of engines can also change
34 Order of engines can also change
35
35
36 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
36 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
37 $ cat hg.pid > $DAEMON_PIDS
37 $ cat hg.pid > $DAEMON_PIDS
38 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none,zlib$' > /dev/null
38 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none,zlib$' > /dev/null
39
39
40 $ killdaemons.py
40 $ killdaemons.py
41
41
42 Start a default server again
42 Start a default server again
43
43
44 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
44 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
45 $ cat hg.pid > $DAEMON_PIDS
45 $ cat hg.pid > $DAEMON_PIDS
46
46
47 Server should send application/mercurial-0.1 to clients if no Accept is used
47 Server should send application/mercurial-0.1 to clients if no Accept is used
48
48
49 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
49 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
50 200 Script output follows
50 200 Script output follows
51 content-type: application/mercurial-0.1
51 content-type: application/mercurial-0.1
52 date: $HTTP_DATE$
52 date: $HTTP_DATE$
53 server: testing stub value
53 server: testing stub value
54 transfer-encoding: chunked
54 transfer-encoding: chunked
55
55
56 Server should send application/mercurial-0.1 when client says it wants it
56 Server should send application/mercurial-0.1 when client says it wants it
57
57
58 $ get-with-headers.py --hgproto '0.1' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
58 $ get-with-headers.py --hgproto '0.1' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
59 200 Script output follows
59 200 Script output follows
60 content-type: application/mercurial-0.1
60 content-type: application/mercurial-0.1
61 date: $HTTP_DATE$
61 date: $HTTP_DATE$
62 server: testing stub value
62 server: testing stub value
63 transfer-encoding: chunked
63 transfer-encoding: chunked
64
64
65 Server should send application/mercurial-0.2 when client says it wants it
65 Server should send application/mercurial-0.2 when client says it wants it
66
66
67 $ get-with-headers.py --hgproto '0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
67 $ get-with-headers.py --hgproto '0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
68 200 Script output follows
68 200 Script output follows
69 content-type: application/mercurial-0.2
69 content-type: application/mercurial-0.2
70 date: $HTTP_DATE$
70 date: $HTTP_DATE$
71 server: testing stub value
71 server: testing stub value
72 transfer-encoding: chunked
72 transfer-encoding: chunked
73
73
74 $ get-with-headers.py --hgproto '0.1 0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
74 $ get-with-headers.py --hgproto '0.1 0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
75 200 Script output follows
75 200 Script output follows
76 content-type: application/mercurial-0.2
76 content-type: application/mercurial-0.2
77 date: $HTTP_DATE$
77 date: $HTTP_DATE$
78 server: testing stub value
78 server: testing stub value
79 transfer-encoding: chunked
79 transfer-encoding: chunked
80
80
81 Requesting a compression format that the server doesn't support will fall back to 0.1
81 Requesting a compression format that the server doesn't support will fall back to 0.1
82
82
83 $ get-with-headers.py --hgproto '0.2 comp=aa' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
83 $ get-with-headers.py --hgproto '0.2 comp=aa' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
84 200 Script output follows
84 200 Script output follows
85 content-type: application/mercurial-0.1
85 content-type: application/mercurial-0.1
86 date: $HTTP_DATE$
86 date: $HTTP_DATE$
87 server: testing stub value
87 server: testing stub value
88 transfer-encoding: chunked
88 transfer-encoding: chunked
89
89
90 #if zstd
90 #if zstd
91 zstd is used if available
91 zstd is used if available
92
92
93 $ get-with-headers.py --hgproto '0.2 comp=zstd' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
93 $ get-with-headers.py --hgproto '0.2 comp=zstd' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
94 $ f --size --hexdump --bytes 36 --sha1 resp
94 $ f --size --hexdump --bytes 36 --sha1 resp
95 resp: size=248, sha1=4d8d8f87fb82bd542ce52881fdc94f850748
95 resp: size=248, sha1=4d8d8f87fb82bd542ce52881fdc94f850748
96 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
96 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
97 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 73 74 64 |t follows...zstd|
97 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 73 74 64 |t follows...zstd|
98 0020: 28 b5 2f fd |(./.|
98 0020: 28 b5 2f fd |(./.|
99
99
100 #endif
100 #endif
101
101
102 application/mercurial-0.2 is not yet used on non-streaming responses
102 application/mercurial-0.2 is not yet used on non-streaming responses
103
103
104 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=heads' -
104 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=heads' -
105 200 Script output follows
105 200 Script output follows
106 content-length: 41
106 content-length: 41
107 content-type: application/mercurial-0.1
107 content-type: application/mercurial-0.1
108 date: $HTTP_DATE$
108 date: $HTTP_DATE$
109 server: testing stub value
109 server: testing stub value
110
110
111 e93700bd72895c5addab234c56d4024b487a362f
111 e93700bd72895c5addab234c56d4024b487a362f
112
112
113 Now test protocol preference usage
113 Now test protocol preference usage
114
114
115 $ killdaemons.py
115 $ killdaemons.py
116 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
116 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
117 $ cat hg.pid > $DAEMON_PIDS
117 $ cat hg.pid > $DAEMON_PIDS
118
118
119 No Accept will send 0.1+zlib, even though "none" is preferred b/c "none" isn't supported on 0.1
119 No Accept will send 0.1+zlib, even though "none" is preferred b/c "none" isn't supported on 0.1
120
120
121 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' Content-Type
121 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' Content-Type
122 200 Script output follows
122 200 Script output follows
123 content-type: application/mercurial-0.1
123 content-type: application/mercurial-0.1
124
124
125 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
125 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
126 $ f --size --hexdump --bytes 28 --sha1 resp
126 $ f --size --hexdump --bytes 28 --sha1 resp
127 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
127 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
128 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
128 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
129 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
129 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
130
130
131 Explicit 0.1 will send zlib because "none" isn't supported on 0.1
131 Explicit 0.1 will send zlib because "none" isn't supported on 0.1
132
132
133 $ get-with-headers.py --hgproto '0.1' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
133 $ get-with-headers.py --hgproto '0.1' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
134 $ f --size --hexdump --bytes 28 --sha1 resp
134 $ f --size --hexdump --bytes 28 --sha1 resp
135 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
135 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
136 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
136 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
137 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
137 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
138
138
139 0.2 with no compression will get "none" because that is the server's preference
139 0.2 with no compression will get "none" because that is the server's preference
140 (spec says ZL and UN are implicitly supported)
140 (spec says ZL and UN are implicitly supported)
141
141
142 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
142 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
143 $ f --size --hexdump --bytes 32 --sha1 resp
143 $ f --size --hexdump --bytes 32 --sha1 resp
144 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
144 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
145 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
145 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
146 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
146 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
147
147
148 Client receives server preference even if local order doesn't match
148 Client receives server preference even if local order doesn't match
149
149
150 $ get-with-headers.py --hgproto '0.2 comp=zlib,none' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
150 $ get-with-headers.py --hgproto '0.2 comp=zlib,none' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
151 $ f --size --hexdump --bytes 32 --sha1 resp
151 $ f --size --hexdump --bytes 32 --sha1 resp
152 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
152 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
153 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
153 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
154 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
154 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
155
155
156 Client receives only supported format even if not server preferred format
156 Client receives only supported format even if not server preferred format
157
157
158 $ get-with-headers.py --hgproto '0.2 comp=zlib' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
158 $ get-with-headers.py --hgproto '0.2 comp=zlib' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
159 $ f --size --hexdump --bytes 33 --sha1 resp
159 $ f --size --hexdump --bytes 33 --sha1 resp
160 resp: size=232, sha1=a1c727f0c9693ca15742a75c30419bc36
160 resp: size=232, sha1=a1c727f0c9693ca15742a75c30419bc36
161 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
161 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
162 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 6c 69 62 |t follows...zlib|
162 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 6c 69 62 |t follows...zlib|
163 0020: 78 |x|
163 0020: 78 |x|
164
165 $ killdaemons.py
166 $ cd ..
167
168 Test listkeys for listing namespaces
169
170 $ hg init empty
171 $ hg -R empty serve -p $HGPORT -d --pid-file hg.pid
172 $ cat hg.pid > $DAEMON_PIDS
173
174 $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF
175 > command listkeys
176 > namespace namespaces
177 > EOF
178 s> sendall(*, 0): (glob)
179 s> GET /?cmd=capabilities HTTP/1.1\r\n
180 s> Accept-Encoding: identity\r\n
181 s> accept: application/mercurial-0.1\r\n
182 s> host: $LOCALIP:$HGPORT\r\n (glob)
183 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
184 s> \r\n
185 s> makefile('rb', None)
186 s> readline() -> 36:
187 s> HTTP/1.1 200 Script output follows\r\n
188 s> readline() -> 28:
189 s> Server: testing stub value\r\n
190 s> readline() -> *: (glob)
191 s> Date: $HTTP_DATE$\r\n
192 s> readline() -> 41:
193 s> Content-Type: application/mercurial-0.1\r\n
194 s> readline() -> 21:
195 s> Content-Length: *\r\n (glob)
196 s> readline() -> 2:
197 s> \r\n
198 s> read(*) -> *: lookup branchmap pushkey known getbundle unbundlehash batch changegroupsubset streamreqs=generaldelta,revlogv1 $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=$BUNDLE2_COMPRESSIONS$ (glob)
199 sending listkeys command
200 s> sendall(*, 0): (glob)
201 s> GET /?cmd=listkeys HTTP/1.1\r\n
202 s> Accept-Encoding: identity\r\n
203 s> vary: X-HgArg-1,X-HgProto-1\r\n
204 s> x-hgarg-1: namespace=namespaces\r\n
205 s> x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
206 s> accept: application/mercurial-0.1\r\n
207 s> host: $LOCALIP:$HGPORT\r\n (glob)
208 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
209 s> \r\n
210 s> makefile('rb', None)
211 s> readline() -> 36:
212 s> HTTP/1.1 200 Script output follows\r\n
213 s> readline() -> 28:
214 s> Server: testing stub value\r\n
215 s> readline() -> *: (glob)
216 s> Date: $HTTP_DATE$\r\n
217 s> readline() -> 41:
218 s> Content-Type: application/mercurial-0.1\r\n
219 s> readline() -> 20:
220 s> Content-Length: 30\r\n
221 s> readline() -> 2:
222 s> \r\n
223 s> read(30) -> 30:
224 s> bookmarks \n
225 s> namespaces \n
226 s> phases
227 response: bookmarks \nnamespaces \nphases
228
229 $ killdaemons.py