debugsetparents: avoid using "r1/r2" variable names for nodeids...
Martin von Zweigbergk
r37161:8bac14ce default
@@ -1,3067 +1,3067 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
     ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
     ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
     ('b', 'branches', None, _('annotate with branch names')),
     ('', 'dots', None, _('use dots for runs')),
     ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")

@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

@command('debugformat',
    [] + cmdutil.formatteropts,
    _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
901 opts = pycompat.byteskwargs(opts)
901 opts = pycompat.byteskwargs(opts)
902 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
902 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
903 maxvariantlength = max(len('format-variant'), maxvariantlength)
903 maxvariantlength = max(len('format-variant'), maxvariantlength)
904
904
905 def makeformatname(name):
905 def makeformatname(name):
906 return '%s:' + (' ' * (maxvariantlength - len(name)))
906 return '%s:' + (' ' * (maxvariantlength - len(name)))
907
907
908 fm = ui.formatter('debugformat', opts)
908 fm = ui.formatter('debugformat', opts)
909 if fm.isplain():
909 if fm.isplain():
910 def formatvalue(value):
910 def formatvalue(value):
911 if util.safehasattr(value, 'startswith'):
911 if util.safehasattr(value, 'startswith'):
912 return value
912 return value
913 if value:
913 if value:
914 return 'yes'
914 return 'yes'
915 else:
915 else:
916 return 'no'
916 return 'no'
917 else:
917 else:
918 formatvalue = pycompat.identity
918 formatvalue = pycompat.identity
919
919
920 fm.plain('format-variant')
920 fm.plain('format-variant')
921 fm.plain(' ' * (maxvariantlength - len('format-variant')))
921 fm.plain(' ' * (maxvariantlength - len('format-variant')))
922 fm.plain(' repo')
922 fm.plain(' repo')
923 if ui.verbose:
923 if ui.verbose:
924 fm.plain(' config default')
924 fm.plain(' config default')
925 fm.plain('\n')
925 fm.plain('\n')
926 for fv in upgrade.allformatvariant:
926 for fv in upgrade.allformatvariant:
927 fm.startitem()
927 fm.startitem()
928 repovalue = fv.fromrepo(repo)
928 repovalue = fv.fromrepo(repo)
929 configvalue = fv.fromconfig(repo)
929 configvalue = fv.fromconfig(repo)
930
930
931 if repovalue != configvalue:
931 if repovalue != configvalue:
932 namelabel = 'formatvariant.name.mismatchconfig'
932 namelabel = 'formatvariant.name.mismatchconfig'
933 repolabel = 'formatvariant.repo.mismatchconfig'
933 repolabel = 'formatvariant.repo.mismatchconfig'
934 elif repovalue != fv.default:
934 elif repovalue != fv.default:
935 namelabel = 'formatvariant.name.mismatchdefault'
935 namelabel = 'formatvariant.name.mismatchdefault'
936 repolabel = 'formatvariant.repo.mismatchdefault'
936 repolabel = 'formatvariant.repo.mismatchdefault'
937 else:
937 else:
938 namelabel = 'formatvariant.name.uptodate'
938 namelabel = 'formatvariant.name.uptodate'
939 repolabel = 'formatvariant.repo.uptodate'
939 repolabel = 'formatvariant.repo.uptodate'
940
940
941 fm.write('name', makeformatname(fv.name), fv.name,
941 fm.write('name', makeformatname(fv.name), fv.name,
942 label=namelabel)
942 label=namelabel)
943 fm.write('repo', ' %3s', formatvalue(repovalue),
943 fm.write('repo', ' %3s', formatvalue(repovalue),
944 label=repolabel)
944 label=repolabel)
945 if fv.default != configvalue:
945 if fv.default != configvalue:
946 configlabel = 'formatvariant.config.special'
946 configlabel = 'formatvariant.config.special'
947 else:
947 else:
948 configlabel = 'formatvariant.config.default'
948 configlabel = 'formatvariant.config.default'
949 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
949 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
950 label=configlabel)
950 label=configlabel)
951 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
951 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
952 label='formatvariant.default')
952 label='formatvariant.default')
953 fm.plain('\n')
953 fm.plain('\n')
954 fm.end()
954 fm.end()
955
955
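# Example invocation (the column values depend on the repository):
#
#   $ hg debugformat --verbose
#   format-variant repo config default
#   ...
#
# Plain output prints yes/no per format variant; --verbose adds the config and
# default columns, and -T/--template (from formatteropts) gives
# machine-readable output.
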
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)

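# Example (field values are obviously system-dependent):
#
#   $ hg debugfsinfo .
#   path: .
#   mounted on: /
#   exec: yes
#   fstype: ext4
#   symlink: yes
#   hardlink: yes
#   case-sensitive: yes
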
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)

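# Usage sketch (REPO, FILE and the node id are placeholders; per the docstring,
# IDs must be full-length hex hashes):
#
#   $ hg debuggetbundle http://example.com/repo bundle.hg \
#         -H <full-hex-head-id> -t bundle2
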
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument, display the combined ignore pattern.

    Given space-separated file names, shows if the given file is ignored and
    if so, shows the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))

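# Example (file names are placeholders):
#
#   $ hg debugignore              # dump the combined ignore pattern
#   $ hg debugignore build/out.o  # report whether the file is ignored and
#                                 # which ignore rule/line matched it
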
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                  " size " + basehdr + " link p1 p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], shortfn(node)))

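# Example invocations matching the usage string above:
#
#   $ hg debugindex -c            # dump the changelog index, format 0
#   $ hg debugindex -f 1 -m       # dump the manifest index, format 1
#
# A plain FILE argument dumps the filelog index of that tracked file.
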
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")

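# Sketch: the dot output can be rendered with graphviz (assuming graphviz is
# installed; file names are placeholders):
#
#   $ hg debugindexdot -c > revs.dot
#   $ dot -Tpng revs.dot -o revs.png
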
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
                 " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems

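# Typical invocations (output varies with the local install):
#
#   $ hg debuginstall             # human-readable checklist
#   $ hg debuginstall -T json     # same data via the formatter
#
# The return value is the number of problems found, so 0 means a healthy
# install.
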
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))

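# Usage sketch (REPO and the ids are placeholders; each ID must be a full
# 40-hex nodeid, per the docstring):
#
#   $ hg debugknown ssh://example.com/repo <full-hex-id> <full-hex-id>
#   10
#
# i.e. one '1' or '0' per queried id.
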
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    debugnamecomplete(ui, repo, *args)

@command('debuglocks',
    [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
     ('W', 'force-wlock', None,
      _('free the working state lock (DANGEROUS)')),
     ('s', 'set-lock', None, _('set the store lock until stopped')),
     ('S', 'set-wlock', None,
      _('set the working state lock until stopped'))],
    _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held

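# Examples built from the options above:
#
#   $ hg debuglocks               # report lock/wlock state, exit 0 if free
#   $ hg debuglocks --set-wlock   # hold the working state lock until Ctrl-C
#   $ hg debuglocks -L            # DANGEROUS: force-free a stale store lock
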
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)

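# Illustrative use: run inside a repository with an interrupted or conflicted
# merge, e.g.
#
#   $ hg debugmergestate --verbose
#
# which prints the raw v1/v2 merge-state records decoded by printrecords()
# above and notes which version was chosen.
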
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')

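# Example (the prefix is arbitrary):
#
#   $ hg debugnamecomplete def
#
# lists every tag, bookmark, other registered name, and open branch name
# starting with "def", one per line.
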
@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
     ('', 'record-parents', False,
      _('record parent information for the precursor')),
     ('r', 'rev', [], _('display markers relevant to REV')),
     ('', 'exclusive', False, _('restrict display to markers only '
                                'relevant to REV')),
     ('', 'index', False, _('display index of the marker')),
     ('', 'delete', [], _('delete markers specified by indices')),
    ] + cmdutil.commitopts2 + cmdutil.formatteropts,
    _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of a transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers). This can happen
                # when both --index and --rev are provided, because we then
                # need to iterate over all of the markers to get the correct
                # indices, but only display the ones that are relevant to
                # the --rev value.
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()

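# Usage sketches (node ids are placeholders and must be full hashes, see
# parsenodeid() above):
#
#   $ hg debugobsolete --index --rev .        # list markers relevant to '.'
#   $ hg debugobsolete <old-node> <new-node>  # record a marker by hand
#   $ hg debugobsolete --delete 0 --delete 2  # drop markers by index
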
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
     ('n', 'normal', None, _('show only normal files')),
     ('a', 'added', None, _('show only added files')),
     ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

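# Example (paths are placeholders); intended for shell completion scripts:
#
#   $ hg debugpathcomplete -f src/       # complete full tracked paths
#   $ hg debugpathcomplete -a            # complete only added files
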
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display it,
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))

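# Example (the URL is a placeholder):
#
#   $ hg --debug debugpeer ssh://example.com/repo
#
# prints the peer url, whether it is local, and whether it is pushable;
# --debug additionally surfaces the peer-request log enabled above.
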
1732 @command('debugpickmergetool',
1732 @command('debugpickmergetool',
1733 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1733 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1734 ('', 'changedelete', None, _('emulate merging change and delete')),
1734 ('', 'changedelete', None, _('emulate merging change and delete')),
1735 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1735 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1736 _('[PATTERN]...'),
1736 _('[PATTERN]...'),
1737 inferrepo=True)
1737 inferrepo=True)
1738 def debugpickmergetool(ui, repo, *pats, **opts):
1738 def debugpickmergetool(ui, repo, *pats, **opts):
1739 """examine which merge tool is chosen for specified file
1739 """examine which merge tool is chosen for specified file
1740
1740
1741 As described in :hg:`help merge-tools`, Mercurial examines the
1741 As described in :hg:`help merge-tools`, Mercurial examines the
1742 configurations below, in this order, to decide which merge tool is
1742 configurations below, in this order, to decide which merge tool is
1743 chosen for the specified file.
1743 chosen for the specified file.
1744
1744
1745 1. ``--tool`` option
1745 1. ``--tool`` option
1746 2. ``HGMERGE`` environment variable
1746 2. ``HGMERGE`` environment variable
1747 3. configurations in ``merge-patterns`` section
1747 3. configurations in ``merge-patterns`` section
1748 4. configuration of ``ui.merge``
1748 4. configuration of ``ui.merge``
1749 5. configurations in ``merge-tools`` section
1749 5. configurations in ``merge-tools`` section
1750 6. ``hgmerge`` tool (for historical reasons only)
1750 6. ``hgmerge`` tool (for historical reasons only)
1751 7. default tool for fallback (``:merge`` or ``:prompt``)
1751 7. default tool for fallback (``:merge`` or ``:prompt``)
1752
1752
1753 This command writes out the examination result in the style below::
1753 This command writes out the examination result in the style below::
1754
1754
1755 FILE = MERGETOOL
1755 FILE = MERGETOOL
1756
1756
1757 By default, all files known in the first parent context of the
1757 By default, all files known in the first parent context of the
1758 working directory are examined. Use file patterns and/or -I/-X
1758 working directory are examined. Use file patterns and/or -I/-X
1759 options to limit the target files. -r/--rev is also useful for examining
1759 options to limit the target files. -r/--rev is also useful for examining
1760 files in another context without actually updating to it.
1760 files in another context without actually updating to it.
1761
1761
1762 With --debug, this command also shows warning messages emitted while
1762 With --debug, this command also shows warning messages emitted while
1763 matching against ``merge-patterns`` and related configuration. It is
1763 matching against ``merge-patterns`` and related configuration. It is
1764 recommended to use this option with explicit file patterns and/or -I/-X
1764 recommended to use this option with explicit file patterns and/or -I/-X
1765 options, because this option increases the amount of output per file
1765 options, because this option increases the amount of output per file
1766 according to the configurations in hgrc.
1766 according to the configurations in hgrc.
1767
1767
1768 With -v/--verbose, this command first shows the configurations below
1768 With -v/--verbose, this command first shows the configurations below
1769 (only those that are actually specified).
1769 (only those that are actually specified).
1770
1770
1771 - ``--tool`` option
1771 - ``--tool`` option
1772 - ``HGMERGE`` environment variable
1772 - ``HGMERGE`` environment variable
1773 - configuration of ``ui.merge``
1773 - configuration of ``ui.merge``
1774
1774
1775 If the merge tool is chosen before matching against
1775 If the merge tool is chosen before matching against
1776 ``merge-patterns``, this command can't show any helpful
1776 ``merge-patterns``, this command can't show any helpful
1777 information, even with --debug. In such a case, the information above
1777 information, even with --debug. In such a case, the information above
1778 is useful for understanding why a merge tool was chosen.
1778 is useful for understanding why a merge tool was chosen.
1779 """
1779 """
1780 opts = pycompat.byteskwargs(opts)
1780 opts = pycompat.byteskwargs(opts)
1781 overrides = {}
1781 overrides = {}
1782 if opts['tool']:
1782 if opts['tool']:
1783 overrides[('ui', 'forcemerge')] = opts['tool']
1783 overrides[('ui', 'forcemerge')] = opts['tool']
1784 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1784 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1785
1785
1786 with ui.configoverride(overrides, 'debugmergepatterns'):
1786 with ui.configoverride(overrides, 'debugmergepatterns'):
1787 hgmerge = encoding.environ.get("HGMERGE")
1787 hgmerge = encoding.environ.get("HGMERGE")
1788 if hgmerge is not None:
1788 if hgmerge is not None:
1789 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1789 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1790 uimerge = ui.config("ui", "merge")
1790 uimerge = ui.config("ui", "merge")
1791 if uimerge:
1791 if uimerge:
1792 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1792 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1793
1793
1794 ctx = scmutil.revsingle(repo, opts.get('rev'))
1794 ctx = scmutil.revsingle(repo, opts.get('rev'))
1795 m = scmutil.match(ctx, pats, opts)
1795 m = scmutil.match(ctx, pats, opts)
1796 changedelete = opts['changedelete']
1796 changedelete = opts['changedelete']
1797 for path in ctx.walk(m):
1797 for path in ctx.walk(m):
1798 fctx = ctx[path]
1798 fctx = ctx[path]
1799 try:
1799 try:
1800 if not ui.debugflag:
1800 if not ui.debugflag:
1801 ui.pushbuffer(error=True)
1801 ui.pushbuffer(error=True)
1802 tool, toolpath = filemerge._picktool(repo, ui, path,
1802 tool, toolpath = filemerge._picktool(repo, ui, path,
1803 fctx.isbinary(),
1803 fctx.isbinary(),
1804 'l' in fctx.flags(),
1804 'l' in fctx.flags(),
1805 changedelete)
1805 changedelete)
1806 finally:
1806 finally:
1807 if not ui.debugflag:
1807 if not ui.debugflag:
1808 ui.popbuffer()
1808 ui.popbuffer()
1809 ui.write(('%s = %s\n') % (path, tool))
1809 ui.write(('%s = %s\n') % (path, tool))
1810
1810
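# Editorial illustration (hypothetical session; the second result assumes a
# local ``merge-patterns`` entry mapping ``**.c`` to ``meld``):
#
#   $ hg debugpickmergetool --tool :merge3 foo.c
#   foo.c = :merge3
#
#   $ hg debugpickmergetool src/foo.c
#   src/foo.c = meld
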
1811 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1811 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1812 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1812 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1813 '''access the pushkey key/value protocol
1813 '''access the pushkey key/value protocol
1814
1814
1815 With two args, list the keys in the given namespace.
1815 With two args, list the keys in the given namespace.
1816
1816
1817 With five args, set a key to new if it currently is set to old.
1817 With five args, set a key to new if it currently is set to old.
1818 Reports success or failure.
1818 Reports success or failure.
1819 '''
1819 '''
1820
1820
1821 target = hg.peer(ui, {}, repopath)
1821 target = hg.peer(ui, {}, repopath)
1822 if keyinfo:
1822 if keyinfo:
1823 key, old, new = keyinfo
1823 key, old, new = keyinfo
1824 r = target.pushkey(namespace, key, old, new)
1824 r = target.pushkey(namespace, key, old, new)
1825 ui.status(pycompat.bytestr(r) + '\n')
1825 ui.status(pycompat.bytestr(r) + '\n')
1826 return not r
1826 return not r
1827 else:
1827 else:
1828 for k, v in sorted(target.listkeys(namespace).iteritems()):
1828 for k, v in sorted(target.listkeys(namespace).iteritems()):
1829 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1829 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1830 stringutil.escapestr(v)))
1830 stringutil.escapestr(v)))
1831
1831
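# Editorial illustration (hypothetical session; 'bookmarks' is a standard
# pushkey namespace and the hash shown is a placeholder):
#
#   $ hg debugpushkey /path/to/repo bookmarks
#   mybook    0123456789abcdef0123456789abcdef01234567
#
# With five arguments (KEY OLD NEW) the command attempts the update instead
# and prints True or False depending on whether the peer accepted it.
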
1832 @command('debugpvec', [], _('A B'))
1832 @command('debugpvec', [], _('A B'))
1833 def debugpvec(ui, repo, a, b=None):
1833 def debugpvec(ui, repo, a, b=None):
1834 ca = scmutil.revsingle(repo, a)
1834 ca = scmutil.revsingle(repo, a)
1835 cb = scmutil.revsingle(repo, b)
1835 cb = scmutil.revsingle(repo, b)
1836 pa = pvec.ctxpvec(ca)
1836 pa = pvec.ctxpvec(ca)
1837 pb = pvec.ctxpvec(cb)
1837 pb = pvec.ctxpvec(cb)
1838 if pa == pb:
1838 if pa == pb:
1839 rel = "="
1839 rel = "="
1840 elif pa > pb:
1840 elif pa > pb:
1841 rel = ">"
1841 rel = ">"
1842 elif pa < pb:
1842 elif pa < pb:
1843 rel = "<"
1843 rel = "<"
1844 elif pa | pb:
1844 elif pa | pb:
1845 rel = "|"
1845 rel = "|"
1846 ui.write(_("a: %s\n") % pa)
1846 ui.write(_("a: %s\n") % pa)
1847 ui.write(_("b: %s\n") % pb)
1847 ui.write(_("b: %s\n") % pb)
1848 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1848 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1849 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1849 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1850 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1850 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1851 pa.distance(pb), rel))
1851 pa.distance(pb), rel))
1852
1852
1853 @command('debugrebuilddirstate|debugrebuildstate',
1853 @command('debugrebuilddirstate|debugrebuildstate',
1854 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1854 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1855 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1855 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1856 'the working copy parent')),
1856 'the working copy parent')),
1857 ],
1857 ],
1858 _('[-r REV]'))
1858 _('[-r REV]'))
1859 def debugrebuilddirstate(ui, repo, rev, **opts):
1859 def debugrebuilddirstate(ui, repo, rev, **opts):
1860 """rebuild the dirstate as it would look like for the given revision
1860 """rebuild the dirstate as it would look like for the given revision
1861
1861
1862 If no revision is specified, the working directory's first parent will be used.
1862 If no revision is specified, the working directory's first parent will be used.
1863
1863
1864 The dirstate will be set to the files of the given revision.
1864 The dirstate will be set to the files of the given revision.
1865 The actual working directory content or existing dirstate
1865 The actual working directory content or existing dirstate
1866 information such as adds or removes is not considered.
1866 information such as adds or removes is not considered.
1867
1867
1868 ``minimal`` will only rebuild the dirstate status for files that claim to be
1868 ``minimal`` will only rebuild the dirstate status for files that claim to be
1869 tracked but are not in the parent manifest, or that exist in the parent
1869 tracked but are not in the parent manifest, or that exist in the parent
1870 manifest but are not in the dirstate. It will not change adds, removes, or
1870 manifest but are not in the dirstate. It will not change adds, removes, or
1871 modified files that are in the working copy parent.
1871 modified files that are in the working copy parent.
1872
1872
1873 One use of this command is to make the next :hg:`status` invocation
1873 One use of this command is to make the next :hg:`status` invocation
1874 check the actual file content.
1874 check the actual file content.
1875 """
1875 """
1876 ctx = scmutil.revsingle(repo, rev)
1876 ctx = scmutil.revsingle(repo, rev)
1877 with repo.wlock():
1877 with repo.wlock():
1878 dirstate = repo.dirstate
1878 dirstate = repo.dirstate
1879 changedfiles = None
1879 changedfiles = None
1880 # See command doc for what minimal does.
1880 # See command doc for what minimal does.
1881 if opts.get(r'minimal'):
1881 if opts.get(r'minimal'):
1882 manifestfiles = set(ctx.manifest().keys())
1882 manifestfiles = set(ctx.manifest().keys())
1883 dirstatefiles = set(dirstate)
1883 dirstatefiles = set(dirstate)
1884 manifestonly = manifestfiles - dirstatefiles
1884 manifestonly = manifestfiles - dirstatefiles
1885 dsonly = dirstatefiles - manifestfiles
1885 dsonly = dirstatefiles - manifestfiles
1886 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1886 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1887 changedfiles = manifestonly | dsnotadded
1887 changedfiles = manifestonly | dsnotadded
1888
1888
1889 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1889 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1890
1890
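# Editorial illustration (toy data, not part of debugcommands.py): how the
# --minimal selection above narrows the set of files passed to rebuild().
def _example_minimal_selection():
    manifestfiles = {'a.txt', 'b.txt'}            # files in the target revision
    dirstateflags = {'b.txt': 'n', 'c.txt': 'a', 'd.txt': 'r'}
    dirstatefiles = set(dirstateflags)
    manifestonly = manifestfiles - dirstatefiles  # in the revision, not in the dirstate
    dsonly = dirstatefiles - manifestfiles        # in the dirstate, not in the revision
    dsnotadded = set(f for f in dsonly if dirstateflags[f] != 'a')
    return manifestonly | dsnotadded              # -> {'a.txt', 'd.txt'}
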
1891 @command('debugrebuildfncache', [], '')
1891 @command('debugrebuildfncache', [], '')
1892 def debugrebuildfncache(ui, repo):
1892 def debugrebuildfncache(ui, repo):
1893 """rebuild the fncache file"""
1893 """rebuild the fncache file"""
1894 repair.rebuildfncache(ui, repo)
1894 repair.rebuildfncache(ui, repo)
1895
1895
1896 @command('debugrename',
1896 @command('debugrename',
1897 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1897 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1898 _('[-r REV] FILE'))
1898 _('[-r REV] FILE'))
1899 def debugrename(ui, repo, file1, *pats, **opts):
1899 def debugrename(ui, repo, file1, *pats, **opts):
1900 """dump rename information"""
1900 """dump rename information"""
1901
1901
1902 opts = pycompat.byteskwargs(opts)
1902 opts = pycompat.byteskwargs(opts)
1903 ctx = scmutil.revsingle(repo, opts.get('rev'))
1903 ctx = scmutil.revsingle(repo, opts.get('rev'))
1904 m = scmutil.match(ctx, (file1,) + pats, opts)
1904 m = scmutil.match(ctx, (file1,) + pats, opts)
1905 for abs in ctx.walk(m):
1905 for abs in ctx.walk(m):
1906 fctx = ctx[abs]
1906 fctx = ctx[abs]
1907 o = fctx.filelog().renamed(fctx.filenode())
1907 o = fctx.filelog().renamed(fctx.filenode())
1908 rel = m.rel(abs)
1908 rel = m.rel(abs)
1909 if o:
1909 if o:
1910 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1910 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1911 else:
1911 else:
1912 ui.write(_("%s not renamed\n") % rel)
1912 ui.write(_("%s not renamed\n") % rel)
1913
1913
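# Editorial illustration (hypothetical session; file names and the hash are
# placeholders):
#
#   $ hg debugrename -r tip copied.txt
#   copied.txt renamed from original.txt:0123456789abcdef0123456789abcdef01234567
#
#   $ hg debugrename unrelated.txt
#   unrelated.txt not renamed
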
1914 @command('debugrevlog', cmdutil.debugrevlogopts +
1914 @command('debugrevlog', cmdutil.debugrevlogopts +
1915 [('d', 'dump', False, _('dump index data'))],
1915 [('d', 'dump', False, _('dump index data'))],
1916 _('-c|-m|FILE'),
1916 _('-c|-m|FILE'),
1917 optionalrepo=True)
1917 optionalrepo=True)
1918 def debugrevlog(ui, repo, file_=None, **opts):
1918 def debugrevlog(ui, repo, file_=None, **opts):
1919 """show data and statistics about a revlog"""
1919 """show data and statistics about a revlog"""
1920 opts = pycompat.byteskwargs(opts)
1920 opts = pycompat.byteskwargs(opts)
1921 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1921 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1922
1922
1923 if opts.get("dump"):
1923 if opts.get("dump"):
1924 numrevs = len(r)
1924 numrevs = len(r)
1925 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1925 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1926 " rawsize totalsize compression heads chainlen\n"))
1926 " rawsize totalsize compression heads chainlen\n"))
1927 ts = 0
1927 ts = 0
1928 heads = set()
1928 heads = set()
1929
1929
1930 for rev in xrange(numrevs):
1930 for rev in xrange(numrevs):
1931 dbase = r.deltaparent(rev)
1931 dbase = r.deltaparent(rev)
1932 if dbase == -1:
1932 if dbase == -1:
1933 dbase = rev
1933 dbase = rev
1934 cbase = r.chainbase(rev)
1934 cbase = r.chainbase(rev)
1935 clen = r.chainlen(rev)
1935 clen = r.chainlen(rev)
1936 p1, p2 = r.parentrevs(rev)
1936 p1, p2 = r.parentrevs(rev)
1937 rs = r.rawsize(rev)
1937 rs = r.rawsize(rev)
1938 ts = ts + rs
1938 ts = ts + rs
1939 heads -= set(r.parentrevs(rev))
1939 heads -= set(r.parentrevs(rev))
1940 heads.add(rev)
1940 heads.add(rev)
1941 try:
1941 try:
1942 compression = ts / r.end(rev)
1942 compression = ts / r.end(rev)
1943 except ZeroDivisionError:
1943 except ZeroDivisionError:
1944 compression = 0
1944 compression = 0
1945 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1945 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1946 "%11d %5d %8d\n" %
1946 "%11d %5d %8d\n" %
1947 (rev, p1, p2, r.start(rev), r.end(rev),
1947 (rev, p1, p2, r.start(rev), r.end(rev),
1948 r.start(dbase), r.start(cbase),
1948 r.start(dbase), r.start(cbase),
1949 r.start(p1), r.start(p2),
1949 r.start(p1), r.start(p2),
1950 rs, ts, compression, len(heads), clen))
1950 rs, ts, compression, len(heads), clen))
1951 return 0
1951 return 0
1952
1952
1953 v = r.version
1953 v = r.version
1954 format = v & 0xFFFF
1954 format = v & 0xFFFF
1955 flags = []
1955 flags = []
1956 gdelta = False
1956 gdelta = False
1957 if v & revlog.FLAG_INLINE_DATA:
1957 if v & revlog.FLAG_INLINE_DATA:
1958 flags.append('inline')
1958 flags.append('inline')
1959 if v & revlog.FLAG_GENERALDELTA:
1959 if v & revlog.FLAG_GENERALDELTA:
1960 gdelta = True
1960 gdelta = True
1961 flags.append('generaldelta')
1961 flags.append('generaldelta')
1962 if not flags:
1962 if not flags:
1963 flags = ['(none)']
1963 flags = ['(none)']
1964
1964
1965 nummerges = 0
1965 nummerges = 0
1966 numfull = 0
1966 numfull = 0
1967 numprev = 0
1967 numprev = 0
1968 nump1 = 0
1968 nump1 = 0
1969 nump2 = 0
1969 nump2 = 0
1970 numother = 0
1970 numother = 0
1971 nump1prev = 0
1971 nump1prev = 0
1972 nump2prev = 0
1972 nump2prev = 0
1973 chainlengths = []
1973 chainlengths = []
1974 chainbases = []
1974 chainbases = []
1975 chainspans = []
1975 chainspans = []
1976
1976
1977 datasize = [None, 0, 0]
1977 datasize = [None, 0, 0]
1978 fullsize = [None, 0, 0]
1978 fullsize = [None, 0, 0]
1979 deltasize = [None, 0, 0]
1979 deltasize = [None, 0, 0]
1980 chunktypecounts = {}
1980 chunktypecounts = {}
1981 chunktypesizes = {}
1981 chunktypesizes = {}
1982
1982
1983 def addsize(size, l):
1983 def addsize(size, l):
1984 if l[0] is None or size < l[0]:
1984 if l[0] is None or size < l[0]:
1985 l[0] = size
1985 l[0] = size
1986 if size > l[1]:
1986 if size > l[1]:
1987 l[1] = size
1987 l[1] = size
1988 l[2] += size
1988 l[2] += size
1989
1989
1990 numrevs = len(r)
1990 numrevs = len(r)
1991 for rev in xrange(numrevs):
1991 for rev in xrange(numrevs):
1992 p1, p2 = r.parentrevs(rev)
1992 p1, p2 = r.parentrevs(rev)
1993 delta = r.deltaparent(rev)
1993 delta = r.deltaparent(rev)
1994 if format > 0:
1994 if format > 0:
1995 addsize(r.rawsize(rev), datasize)
1995 addsize(r.rawsize(rev), datasize)
1996 if p2 != nullrev:
1996 if p2 != nullrev:
1997 nummerges += 1
1997 nummerges += 1
1998 size = r.length(rev)
1998 size = r.length(rev)
1999 if delta == nullrev:
1999 if delta == nullrev:
2000 chainlengths.append(0)
2000 chainlengths.append(0)
2001 chainbases.append(r.start(rev))
2001 chainbases.append(r.start(rev))
2002 chainspans.append(size)
2002 chainspans.append(size)
2003 numfull += 1
2003 numfull += 1
2004 addsize(size, fullsize)
2004 addsize(size, fullsize)
2005 else:
2005 else:
2006 chainlengths.append(chainlengths[delta] + 1)
2006 chainlengths.append(chainlengths[delta] + 1)
2007 baseaddr = chainbases[delta]
2007 baseaddr = chainbases[delta]
2008 revaddr = r.start(rev)
2008 revaddr = r.start(rev)
2009 chainbases.append(baseaddr)
2009 chainbases.append(baseaddr)
2010 chainspans.append((revaddr - baseaddr) + size)
2010 chainspans.append((revaddr - baseaddr) + size)
2011 addsize(size, deltasize)
2011 addsize(size, deltasize)
2012 if delta == rev - 1:
2012 if delta == rev - 1:
2013 numprev += 1
2013 numprev += 1
2014 if delta == p1:
2014 if delta == p1:
2015 nump1prev += 1
2015 nump1prev += 1
2016 elif delta == p2:
2016 elif delta == p2:
2017 nump2prev += 1
2017 nump2prev += 1
2018 elif delta == p1:
2018 elif delta == p1:
2019 nump1 += 1
2019 nump1 += 1
2020 elif delta == p2:
2020 elif delta == p2:
2021 nump2 += 1
2021 nump2 += 1
2022 elif delta != nullrev:
2022 elif delta != nullrev:
2023 numother += 1
2023 numother += 1
2024
2024
2025 # Obtain data on the raw chunks in the revlog.
2025 # Obtain data on the raw chunks in the revlog.
2026 segment = r._getsegmentforrevs(rev, rev)[1]
2026 segment = r._getsegmentforrevs(rev, rev)[1]
2027 if segment:
2027 if segment:
2028 chunktype = bytes(segment[0:1])
2028 chunktype = bytes(segment[0:1])
2029 else:
2029 else:
2030 chunktype = 'empty'
2030 chunktype = 'empty'
2031
2031
2032 if chunktype not in chunktypecounts:
2032 if chunktype not in chunktypecounts:
2033 chunktypecounts[chunktype] = 0
2033 chunktypecounts[chunktype] = 0
2034 chunktypesizes[chunktype] = 0
2034 chunktypesizes[chunktype] = 0
2035
2035
2036 chunktypecounts[chunktype] += 1
2036 chunktypecounts[chunktype] += 1
2037 chunktypesizes[chunktype] += size
2037 chunktypesizes[chunktype] += size
2038
2038
2039 # Adjust size min value for empty cases
2039 # Adjust size min value for empty cases
2040 for size in (datasize, fullsize, deltasize):
2040 for size in (datasize, fullsize, deltasize):
2041 if size[0] is None:
2041 if size[0] is None:
2042 size[0] = 0
2042 size[0] = 0
2043
2043
2044 numdeltas = numrevs - numfull
2044 numdeltas = numrevs - numfull
2045 numoprev = numprev - nump1prev - nump2prev
2045 numoprev = numprev - nump1prev - nump2prev
2046 totalrawsize = datasize[2]
2046 totalrawsize = datasize[2]
2047 datasize[2] /= numrevs
2047 datasize[2] /= numrevs
2048 fulltotal = fullsize[2]
2048 fulltotal = fullsize[2]
2049 fullsize[2] /= numfull
2049 fullsize[2] /= numfull
2050 deltatotal = deltasize[2]
2050 deltatotal = deltasize[2]
2051 if numrevs - numfull > 0:
2051 if numrevs - numfull > 0:
2052 deltasize[2] /= numrevs - numfull
2052 deltasize[2] /= numrevs - numfull
2053 totalsize = fulltotal + deltatotal
2053 totalsize = fulltotal + deltatotal
2054 avgchainlen = sum(chainlengths) / numrevs
2054 avgchainlen = sum(chainlengths) / numrevs
2055 maxchainlen = max(chainlengths)
2055 maxchainlen = max(chainlengths)
2056 maxchainspan = max(chainspans)
2056 maxchainspan = max(chainspans)
2057 compratio = 1
2057 compratio = 1
2058 if totalsize:
2058 if totalsize:
2059 compratio = totalrawsize / totalsize
2059 compratio = totalrawsize / totalsize
2060
2060
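# Editorial illustration (made-up numbers): if the uncompressed revision
# texts total 10,000,000 bytes (totalrawsize) while the stored full
# snapshots plus deltas total 2,500,000 bytes (totalsize), the reported
# compression ratio is 10000000 / 2500000 = 4.
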
2061 basedfmtstr = '%%%dd\n'
2061 basedfmtstr = '%%%dd\n'
2062 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2062 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2063
2063
2064 def dfmtstr(max):
2064 def dfmtstr(max):
2065 return basedfmtstr % len(str(max))
2065 return basedfmtstr % len(str(max))
2066 def pcfmtstr(max, padding=0):
2066 def pcfmtstr(max, padding=0):
2067 return basepcfmtstr % (len(str(max)), ' ' * padding)
2067 return basepcfmtstr % (len(str(max)), ' ' * padding)
2068
2068
2069 def pcfmt(value, total):
2069 def pcfmt(value, total):
2070 if total:
2070 if total:
2071 return (value, 100 * float(value) / total)
2071 return (value, 100 * float(value) / total)
2072 else:
2072 else:
2073 return value, 100.0
2073 return value, 100.0
2074
2074
2075 ui.write(('format : %d\n') % format)
2075 ui.write(('format : %d\n') % format)
2076 ui.write(('flags : %s\n') % ', '.join(flags))
2076 ui.write(('flags : %s\n') % ', '.join(flags))
2077
2077
2078 ui.write('\n')
2078 ui.write('\n')
2079 fmt = pcfmtstr(totalsize)
2079 fmt = pcfmtstr(totalsize)
2080 fmt2 = dfmtstr(totalsize)
2080 fmt2 = dfmtstr(totalsize)
2081 ui.write(('revisions : ') + fmt2 % numrevs)
2081 ui.write(('revisions : ') + fmt2 % numrevs)
2082 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2082 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2083 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2083 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2084 ui.write(('revisions : ') + fmt2 % numrevs)
2084 ui.write(('revisions : ') + fmt2 % numrevs)
2085 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2085 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2086 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2086 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2087 ui.write(('revision size : ') + fmt2 % totalsize)
2087 ui.write(('revision size : ') + fmt2 % totalsize)
2088 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2088 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2089 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2089 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2090
2090
2091 def fmtchunktype(chunktype):
2091 def fmtchunktype(chunktype):
2092 if chunktype == 'empty':
2092 if chunktype == 'empty':
2093 return ' %s : ' % chunktype
2093 return ' %s : ' % chunktype
2094 elif chunktype in pycompat.bytestr(string.ascii_letters):
2094 elif chunktype in pycompat.bytestr(string.ascii_letters):
2095 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2095 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2096 else:
2096 else:
2097 return ' 0x%s : ' % hex(chunktype)
2097 return ' 0x%s : ' % hex(chunktype)
2098
2098
2099 ui.write('\n')
2099 ui.write('\n')
2100 ui.write(('chunks : ') + fmt2 % numrevs)
2100 ui.write(('chunks : ') + fmt2 % numrevs)
2101 for chunktype in sorted(chunktypecounts):
2101 for chunktype in sorted(chunktypecounts):
2102 ui.write(fmtchunktype(chunktype))
2102 ui.write(fmtchunktype(chunktype))
2103 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2103 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2104 ui.write(('chunks size : ') + fmt2 % totalsize)
2104 ui.write(('chunks size : ') + fmt2 % totalsize)
2105 for chunktype in sorted(chunktypecounts):
2105 for chunktype in sorted(chunktypecounts):
2106 ui.write(fmtchunktype(chunktype))
2106 ui.write(fmtchunktype(chunktype))
2107 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2107 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2108
2108
2109 ui.write('\n')
2109 ui.write('\n')
2110 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2110 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2111 ui.write(('avg chain length : ') + fmt % avgchainlen)
2111 ui.write(('avg chain length : ') + fmt % avgchainlen)
2112 ui.write(('max chain length : ') + fmt % maxchainlen)
2112 ui.write(('max chain length : ') + fmt % maxchainlen)
2113 ui.write(('max chain reach : ') + fmt % maxchainspan)
2113 ui.write(('max chain reach : ') + fmt % maxchainspan)
2114 ui.write(('compression ratio : ') + fmt % compratio)
2114 ui.write(('compression ratio : ') + fmt % compratio)
2115
2115
2116 if format > 0:
2116 if format > 0:
2117 ui.write('\n')
2117 ui.write('\n')
2118 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2118 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2119 % tuple(datasize))
2119 % tuple(datasize))
2120 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2120 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2121 % tuple(fullsize))
2121 % tuple(fullsize))
2122 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2122 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2123 % tuple(deltasize))
2123 % tuple(deltasize))
2124
2124
2125 if numdeltas > 0:
2125 if numdeltas > 0:
2126 ui.write('\n')
2126 ui.write('\n')
2127 fmt = pcfmtstr(numdeltas)
2127 fmt = pcfmtstr(numdeltas)
2128 fmt2 = pcfmtstr(numdeltas, 4)
2128 fmt2 = pcfmtstr(numdeltas, 4)
2129 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2129 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2130 if numprev > 0:
2130 if numprev > 0:
2131 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2131 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2132 numprev))
2132 numprev))
2133 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2133 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2134 numprev))
2134 numprev))
2135 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2135 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2136 numprev))
2136 numprev))
2137 if gdelta:
2137 if gdelta:
2138 ui.write(('deltas against p1 : ')
2138 ui.write(('deltas against p1 : ')
2139 + fmt % pcfmt(nump1, numdeltas))
2139 + fmt % pcfmt(nump1, numdeltas))
2140 ui.write(('deltas against p2 : ')
2140 ui.write(('deltas against p2 : ')
2141 + fmt % pcfmt(nump2, numdeltas))
2141 + fmt % pcfmt(nump2, numdeltas))
2142 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2142 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2143 numdeltas))
2143 numdeltas))
2144
2144
2145 @command('debugrevspec',
2145 @command('debugrevspec',
2146 [('', 'optimize', None,
2146 [('', 'optimize', None,
2147 _('print parsed tree after optimizing (DEPRECATED)')),
2147 _('print parsed tree after optimizing (DEPRECATED)')),
2148 ('', 'show-revs', True, _('print list of result revisions (default)')),
2148 ('', 'show-revs', True, _('print list of result revisions (default)')),
2149 ('s', 'show-set', None, _('print internal representation of result set')),
2149 ('s', 'show-set', None, _('print internal representation of result set')),
2150 ('p', 'show-stage', [],
2150 ('p', 'show-stage', [],
2151 _('print parsed tree at the given stage'), _('NAME')),
2151 _('print parsed tree at the given stage'), _('NAME')),
2152 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2152 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2153 ('', 'verify-optimized', False, _('verify optimized result')),
2153 ('', 'verify-optimized', False, _('verify optimized result')),
2154 ],
2154 ],
2155 ('REVSPEC'))
2155 ('REVSPEC'))
2156 def debugrevspec(ui, repo, expr, **opts):
2156 def debugrevspec(ui, repo, expr, **opts):
2157 """parse and apply a revision specification
2157 """parse and apply a revision specification
2158
2158
2159 Use -p/--show-stage option to print the parsed tree at the given stages.
2159 Use -p/--show-stage option to print the parsed tree at the given stages.
2160 Use -p all to print the tree at every stage.
2160 Use -p all to print the tree at every stage.
2161
2161
2162 Use --no-show-revs option with -s or -p to print only the set
2162 Use --no-show-revs option with -s or -p to print only the set
2163 representation or the parsed tree respectively.
2163 representation or the parsed tree respectively.
2164
2164
2165 Use --verify-optimized to compare the optimized result with the unoptimized
2165 Use --verify-optimized to compare the optimized result with the unoptimized
2166 one. Returns 1 if the optimized result differs.
2166 one. Returns 1 if the optimized result differs.
2167 """
2167 """
2168 opts = pycompat.byteskwargs(opts)
2168 opts = pycompat.byteskwargs(opts)
2169 aliases = ui.configitems('revsetalias')
2169 aliases = ui.configitems('revsetalias')
2170 stages = [
2170 stages = [
2171 ('parsed', lambda tree: tree),
2171 ('parsed', lambda tree: tree),
2172 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2172 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2173 ui.warn)),
2173 ui.warn)),
2174 ('concatenated', revsetlang.foldconcat),
2174 ('concatenated', revsetlang.foldconcat),
2175 ('analyzed', revsetlang.analyze),
2175 ('analyzed', revsetlang.analyze),
2176 ('optimized', revsetlang.optimize),
2176 ('optimized', revsetlang.optimize),
2177 ]
2177 ]
2178 if opts['no_optimized']:
2178 if opts['no_optimized']:
2179 stages = stages[:-1]
2179 stages = stages[:-1]
2180 if opts['verify_optimized'] and opts['no_optimized']:
2180 if opts['verify_optimized'] and opts['no_optimized']:
2181 raise error.Abort(_('cannot use --verify-optimized with '
2181 raise error.Abort(_('cannot use --verify-optimized with '
2182 '--no-optimized'))
2182 '--no-optimized'))
2183 stagenames = set(n for n, f in stages)
2183 stagenames = set(n for n, f in stages)
2184
2184
2185 showalways = set()
2185 showalways = set()
2186 showchanged = set()
2186 showchanged = set()
2187 if ui.verbose and not opts['show_stage']:
2187 if ui.verbose and not opts['show_stage']:
2188 # show parsed tree by --verbose (deprecated)
2188 # show parsed tree by --verbose (deprecated)
2189 showalways.add('parsed')
2189 showalways.add('parsed')
2190 showchanged.update(['expanded', 'concatenated'])
2190 showchanged.update(['expanded', 'concatenated'])
2191 if opts['optimize']:
2191 if opts['optimize']:
2192 showalways.add('optimized')
2192 showalways.add('optimized')
2193 if opts['show_stage'] and opts['optimize']:
2193 if opts['show_stage'] and opts['optimize']:
2194 raise error.Abort(_('cannot use --optimize with --show-stage'))
2194 raise error.Abort(_('cannot use --optimize with --show-stage'))
2195 if opts['show_stage'] == ['all']:
2195 if opts['show_stage'] == ['all']:
2196 showalways.update(stagenames)
2196 showalways.update(stagenames)
2197 else:
2197 else:
2198 for n in opts['show_stage']:
2198 for n in opts['show_stage']:
2199 if n not in stagenames:
2199 if n not in stagenames:
2200 raise error.Abort(_('invalid stage name: %s') % n)
2200 raise error.Abort(_('invalid stage name: %s') % n)
2201 showalways.update(opts['show_stage'])
2201 showalways.update(opts['show_stage'])
2202
2202
2203 treebystage = {}
2203 treebystage = {}
2204 printedtree = None
2204 printedtree = None
2205 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2205 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2206 for n, f in stages:
2206 for n, f in stages:
2207 treebystage[n] = tree = f(tree)
2207 treebystage[n] = tree = f(tree)
2208 if n in showalways or (n in showchanged and tree != printedtree):
2208 if n in showalways or (n in showchanged and tree != printedtree):
2209 if opts['show_stage'] or n != 'parsed':
2209 if opts['show_stage'] or n != 'parsed':
2210 ui.write(("* %s:\n") % n)
2210 ui.write(("* %s:\n") % n)
2211 ui.write(revsetlang.prettyformat(tree), "\n")
2211 ui.write(revsetlang.prettyformat(tree), "\n")
2212 printedtree = tree
2212 printedtree = tree
2213
2213
2214 if opts['verify_optimized']:
2214 if opts['verify_optimized']:
2215 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2215 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2216 brevs = revset.makematcher(treebystage['optimized'])(repo)
2216 brevs = revset.makematcher(treebystage['optimized'])(repo)
2217 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2217 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2218 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2218 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2219 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2219 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2220 arevs = list(arevs)
2220 arevs = list(arevs)
2221 brevs = list(brevs)
2221 brevs = list(brevs)
2222 if arevs == brevs:
2222 if arevs == brevs:
2223 return 0
2223 return 0
2224 ui.write(('--- analyzed\n'), label='diff.file_a')
2224 ui.write(('--- analyzed\n'), label='diff.file_a')
2225 ui.write(('+++ optimized\n'), label='diff.file_b')
2225 ui.write(('+++ optimized\n'), label='diff.file_b')
2226 sm = difflib.SequenceMatcher(None, arevs, brevs)
2226 sm = difflib.SequenceMatcher(None, arevs, brevs)
2227 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2227 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2228 if tag in ('delete', 'replace'):
2228 if tag in ('delete', 'replace'):
2229 for c in arevs[alo:ahi]:
2229 for c in arevs[alo:ahi]:
2230 ui.write('-%s\n' % c, label='diff.deleted')
2230 ui.write('-%s\n' % c, label='diff.deleted')
2231 if tag in ('insert', 'replace'):
2231 if tag in ('insert', 'replace'):
2232 for c in brevs[blo:bhi]:
2232 for c in brevs[blo:bhi]:
2233 ui.write('+%s\n' % c, label='diff.inserted')
2233 ui.write('+%s\n' % c, label='diff.inserted')
2234 if tag == 'equal':
2234 if tag == 'equal':
2235 for c in arevs[alo:ahi]:
2235 for c in arevs[alo:ahi]:
2236 ui.write(' %s\n' % c)
2236 ui.write(' %s\n' % c)
2237 return 1
2237 return 1
2238
2238
2239 func = revset.makematcher(tree)
2239 func = revset.makematcher(tree)
2240 revs = func(repo)
2240 revs = func(repo)
2241 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2241 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2242 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2242 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2243 if not opts['show_revs']:
2243 if not opts['show_revs']:
2244 return
2244 return
2245 for c in revs:
2245 for c in revs:
2246 ui.write("%d\n" % c)
2246 ui.write("%d\n" % c)
2247
2247
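# Editorial illustration (hypothetical session; the printed trees are
# abridged and the resulting revision numbers depend on the repository):
#
#   $ hg debugrevspec --no-show-revs -p analyzed -p optimized 'tip or .'
#   * analyzed:
#     (or ...)
#   * optimized:
#     (or ...)
#
#   $ hg debugrevspec 'tip'
#   42
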
2248 @command('debugserve', [
2248 @command('debugserve', [
2249 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2249 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2250 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2250 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2251 ('', 'logiofile', '', _('file to log server I/O to')),
2251 ('', 'logiofile', '', _('file to log server I/O to')),
2252 ], '')
2252 ], '')
2253 def debugserve(ui, repo, **opts):
2253 def debugserve(ui, repo, **opts):
2254 """run a server with advanced settings
2254 """run a server with advanced settings
2255
2255
2256 This command is similar to :hg:`serve`. It exists partially as a
2256 This command is similar to :hg:`serve`. It exists partially as a
2257 workaround for the fact that ``hg serve --stdio`` must have specific
2257 workaround for the fact that ``hg serve --stdio`` must have specific
2258 arguments for security reasons.
2258 arguments for security reasons.
2259 """
2259 """
2260 opts = pycompat.byteskwargs(opts)
2260 opts = pycompat.byteskwargs(opts)
2261
2261
2262 if not opts['sshstdio']:
2262 if not opts['sshstdio']:
2263 raise error.Abort(_('only --sshstdio is currently supported'))
2263 raise error.Abort(_('only --sshstdio is currently supported'))
2264
2264
2265 logfh = None
2265 logfh = None
2266
2266
2267 if opts['logiofd'] and opts['logiofile']:
2267 if opts['logiofd'] and opts['logiofile']:
2268 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2268 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2269
2269
2270 if opts['logiofd']:
2270 if opts['logiofd']:
2271 # Line buffered because output is line based.
2271 # Line buffered because output is line based.
2272 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2272 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2273 elif opts['logiofile']:
2273 elif opts['logiofile']:
2274 logfh = open(opts['logiofile'], 'ab', 1)
2274 logfh = open(opts['logiofile'], 'ab', 1)
2275
2275
2276 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2276 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2277 s.serve_forever()
2277 s.serve_forever()
2278
2278
2279 @command('debugsetparents', [], _('REV1 [REV2]'))
2279 @command('debugsetparents', [], _('REV1 [REV2]'))
2280 def debugsetparents(ui, repo, rev1, rev2=None):
2280 def debugsetparents(ui, repo, rev1, rev2=None):
2281 """manually set the parents of the current working directory
2281 """manually set the parents of the current working directory
2282
2282
2283 This is useful for writing repository conversion tools, but should
2283 This is useful for writing repository conversion tools, but should
2284 be used with care. For example, neither the working directory nor the
2284 be used with care. For example, neither the working directory nor the
2285 dirstate is updated, so file status may be incorrect after running this
2285 dirstate is updated, so file status may be incorrect after running this
2286 command.
2286 command.
2287
2287
2288 Returns 0 on success.
2288 Returns 0 on success.
2289 """
2289 """
2290
2290
2291 r1 = scmutil.revsingle(repo, rev1).node()
2291 node1 = scmutil.revsingle(repo, rev1).node()
2292 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2292 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2293
2293
2294 with repo.wlock():
2294 with repo.wlock():
2295 repo.setparents(r1, r2)
2295 repo.setparents(node1, node2)
2296
2296
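# Editorial illustration (hypothetical session): marking the working
# directory as a pending merge of two existing revisions, e.g. from a
# conversion tool. Neither the working copy files nor the rest of the
# dirstate are touched.
#
#   $ hg debugsetparents 3 7
#   $ hg parents --template '{rev}\n'
#   3
#   7
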
2297 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2297 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2298 def debugssl(ui, repo, source=None, **opts):
2298 def debugssl(ui, repo, source=None, **opts):
2299 '''test a secure connection to a server
2299 '''test a secure connection to a server
2300
2300
2301 This builds the certificate chain for the server on Windows, installing the
2301 This builds the certificate chain for the server on Windows, installing the
2302 missing intermediates and trusted root via Windows Update if necessary. It
2302 missing intermediates and trusted root via Windows Update if necessary. It
2303 does nothing on other platforms.
2303 does nothing on other platforms.
2304
2304
2305 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2305 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2306 that server is used. See :hg:`help urls` for more information.
2306 that server is used. See :hg:`help urls` for more information.
2307
2307
2308 If the update succeeds, retry the original operation. Otherwise, the cause
2308 If the update succeeds, retry the original operation. Otherwise, the cause
2309 of the SSL error is likely another issue.
2309 of the SSL error is likely another issue.
2310 '''
2310 '''
2311 if not pycompat.iswindows:
2311 if not pycompat.iswindows:
2312 raise error.Abort(_('certificate chain building is only possible on '
2312 raise error.Abort(_('certificate chain building is only possible on '
2313 'Windows'))
2313 'Windows'))
2314
2314
2315 if not source:
2315 if not source:
2316 if not repo:
2316 if not repo:
2317 raise error.Abort(_("there is no Mercurial repository here, and no "
2317 raise error.Abort(_("there is no Mercurial repository here, and no "
2318 "server specified"))
2318 "server specified"))
2319 source = "default"
2319 source = "default"
2320
2320
2321 source, branches = hg.parseurl(ui.expandpath(source))
2321 source, branches = hg.parseurl(ui.expandpath(source))
2322 url = util.url(source)
2322 url = util.url(source)
2323 addr = None
2323 addr = None
2324
2324
2325 defaultport = {'https': 443, 'ssh': 22}
2325 defaultport = {'https': 443, 'ssh': 22}
2326 if url.scheme in defaultport:
2326 if url.scheme in defaultport:
2327 try:
2327 try:
2328 addr = (url.host, int(url.port or defaultport[url.scheme]))
2328 addr = (url.host, int(url.port or defaultport[url.scheme]))
2329 except ValueError:
2329 except ValueError:
2330 raise error.Abort(_("malformed port number in URL"))
2330 raise error.Abort(_("malformed port number in URL"))
2331 else:
2331 else:
2332 raise error.Abort(_("only https and ssh connections are supported"))
2332 raise error.Abort(_("only https and ssh connections are supported"))
2333
2333
2334 from . import win32
2334 from . import win32
2335
2335
2336 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2336 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2337 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2337 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2338
2338
2339 try:
2339 try:
2340 s.connect(addr)
2340 s.connect(addr)
2341 cert = s.getpeercert(True)
2341 cert = s.getpeercert(True)
2342
2342
2343 ui.status(_('checking the certificate chain for %s\n') % url.host)
2343 ui.status(_('checking the certificate chain for %s\n') % url.host)
2344
2344
2345 complete = win32.checkcertificatechain(cert, build=False)
2345 complete = win32.checkcertificatechain(cert, build=False)
2346
2346
2347 if not complete:
2347 if not complete:
2348 ui.status(_('certificate chain is incomplete, updating... '))
2348 ui.status(_('certificate chain is incomplete, updating... '))
2349
2349
2350 if not win32.checkcertificatechain(cert):
2350 if not win32.checkcertificatechain(cert):
2351 ui.status(_('failed.\n'))
2351 ui.status(_('failed.\n'))
2352 else:
2352 else:
2353 ui.status(_('done.\n'))
2353 ui.status(_('done.\n'))
2354 else:
2354 else:
2355 ui.status(_('full certificate chain is available\n'))
2355 ui.status(_('full certificate chain is available\n'))
2356 finally:
2356 finally:
2357 s.close()
2357 s.close()
2358
2358
2359 @command('debugsub',
2359 @command('debugsub',
2360 [('r', 'rev', '',
2360 [('r', 'rev', '',
2361 _('revision to check'), _('REV'))],
2361 _('revision to check'), _('REV'))],
2362 _('[-r REV] [REV]'))
2362 _('[-r REV] [REV]'))
2363 def debugsub(ui, repo, rev=None):
2363 def debugsub(ui, repo, rev=None):
2364 ctx = scmutil.revsingle(repo, rev, None)
2364 ctx = scmutil.revsingle(repo, rev, None)
2365 for k, v in sorted(ctx.substate.items()):
2365 for k, v in sorted(ctx.substate.items()):
2366 ui.write(('path %s\n') % k)
2366 ui.write(('path %s\n') % k)
2367 ui.write((' source %s\n') % v[0])
2367 ui.write((' source %s\n') % v[0])
2368 ui.write((' revision %s\n') % v[1])
2368 ui.write((' revision %s\n') % v[1])
2369
2369
2370 @command('debugsuccessorssets',
2370 @command('debugsuccessorssets',
2371 [('', 'closest', False, _('return closest successors sets only'))],
2371 [('', 'closest', False, _('return closest successors sets only'))],
2372 _('[REV]'))
2372 _('[REV]'))
2373 def debugsuccessorssets(ui, repo, *revs, **opts):
2373 def debugsuccessorssets(ui, repo, *revs, **opts):
2374 """show set of successors for revision
2374 """show set of successors for revision
2375
2375
2376 A successors set of changeset A is a consistent group of revisions that
2376 A successors set of changeset A is a consistent group of revisions that
2377 succeed A. It contains only non-obsolete changesets unless the closest
2377 succeed A. It contains only non-obsolete changesets unless the closest
2378 successors sets are requested (see --closest).
2378 successors sets are requested (see --closest).
2379
2379
2380 In most cases a changeset A has a single successors set containing a single
2380 In most cases a changeset A has a single successors set containing a single
2381 successor (changeset A replaced by A').
2381 successor (changeset A replaced by A').
2382
2382
2383 A changeset that is made obsolete with no successors is called "pruned".
2383 A changeset that is made obsolete with no successors is called "pruned".
2384 Such changesets have no successors sets at all.
2384 Such changesets have no successors sets at all.
2385
2385
2386 A changeset that has been "split" will have a successors set containing
2386 A changeset that has been "split" will have a successors set containing
2387 more than one successor.
2387 more than one successor.
2388
2388
2389 A changeset that has been rewritten in multiple different ways is called
2389 A changeset that has been rewritten in multiple different ways is called
2390 "divergent". Such changesets have multiple successor sets (each of which
2390 "divergent". Such changesets have multiple successor sets (each of which
2391 may also be split, i.e. have multiple successors).
2391 may also be split, i.e. have multiple successors).
2392
2392
2393 Results are displayed as follows::
2393 Results are displayed as follows::
2394
2394
2395 <rev1>
2395 <rev1>
2396 <successors-1A>
2396 <successors-1A>
2397 <rev2>
2397 <rev2>
2398 <successors-2A>
2398 <successors-2A>
2399 <successors-2B1> <successors-2B2> <successors-2B3>
2399 <successors-2B1> <successors-2B2> <successors-2B3>
2400
2400
2401 Here rev2 has two possible (i.e. divergent) successors sets. The first
2401 Here rev2 has two possible (i.e. divergent) successors sets. The first
2402 holds one element, whereas the second holds three (i.e. the changeset has
2402 holds one element, whereas the second holds three (i.e. the changeset has
2403 been split).
2403 been split).
2404 """
2404 """
2405 # passed to successorssets caching computation from one call to another
2405 # passed to successorssets caching computation from one call to another
2406 cache = {}
2406 cache = {}
2407 ctx2str = bytes
2407 ctx2str = bytes
2408 node2str = short
2408 node2str = short
2409 for rev in scmutil.revrange(repo, revs):
2409 for rev in scmutil.revrange(repo, revs):
2410 ctx = repo[rev]
2410 ctx = repo[rev]
2411 ui.write('%s\n'% ctx2str(ctx))
2411 ui.write('%s\n'% ctx2str(ctx))
2412 for succsset in obsutil.successorssets(repo, ctx.node(),
2412 for succsset in obsutil.successorssets(repo, ctx.node(),
2413 closest=opts[r'closest'],
2413 closest=opts[r'closest'],
2414 cache=cache):
2414 cache=cache):
2415 if succsset:
2415 if succsset:
2416 ui.write(' ')
2416 ui.write(' ')
2417 ui.write(node2str(succsset[0]))
2417 ui.write(node2str(succsset[0]))
2418 for node in succsset[1:]:
2418 for node in succsset[1:]:
2419 ui.write(' ')
2419 ui.write(' ')
2420 ui.write(node2str(node))
2420 ui.write(node2str(node))
2421 ui.write('\n')
2421 ui.write('\n')
2422
2422
2423 @command('debugtemplate',
2423 @command('debugtemplate',
2424 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2424 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2425 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2425 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2426 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2426 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2427 optionalrepo=True)
2427 optionalrepo=True)
2428 def debugtemplate(ui, repo, tmpl, **opts):
2428 def debugtemplate(ui, repo, tmpl, **opts):
2429 """parse and apply a template
2429 """parse and apply a template
2430
2430
2431 If -r/--rev is given, the template is processed as a log template and
2431 If -r/--rev is given, the template is processed as a log template and
2432 applied to the given changesets. Otherwise, it is processed as a generic
2432 applied to the given changesets. Otherwise, it is processed as a generic
2433 template.
2433 template.
2434
2434
2435 Use --verbose to print the parsed tree.
2435 Use --verbose to print the parsed tree.
2436 """
2436 """
2437 revs = None
2437 revs = None
2438 if opts[r'rev']:
2438 if opts[r'rev']:
2439 if repo is None:
2439 if repo is None:
2440 raise error.RepoError(_('there is no Mercurial repository here '
2440 raise error.RepoError(_('there is no Mercurial repository here '
2441 '(.hg not found)'))
2441 '(.hg not found)'))
2442 revs = scmutil.revrange(repo, opts[r'rev'])
2442 revs = scmutil.revrange(repo, opts[r'rev'])
2443
2443
2444 props = {}
2444 props = {}
2445 for d in opts[r'define']:
2445 for d in opts[r'define']:
2446 try:
2446 try:
2447 k, v = (e.strip() for e in d.split('=', 1))
2447 k, v = (e.strip() for e in d.split('=', 1))
2448 if not k or k == 'ui':
2448 if not k or k == 'ui':
2449 raise ValueError
2449 raise ValueError
2450 props[k] = v
2450 props[k] = v
2451 except ValueError:
2451 except ValueError:
2452 raise error.Abort(_('malformed keyword definition: %s') % d)
2452 raise error.Abort(_('malformed keyword definition: %s') % d)
2453
2453
2454 if ui.verbose:
2454 if ui.verbose:
2455 aliases = ui.configitems('templatealias')
2455 aliases = ui.configitems('templatealias')
2456 tree = templater.parse(tmpl)
2456 tree = templater.parse(tmpl)
2457 ui.note(templater.prettyformat(tree), '\n')
2457 ui.note(templater.prettyformat(tree), '\n')
2458 newtree = templater.expandaliases(tree, aliases)
2458 newtree = templater.expandaliases(tree, aliases)
2459 if newtree != tree:
2459 if newtree != tree:
2460 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2460 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2461
2461
2462 if revs is None:
2462 if revs is None:
2463 tres = formatter.templateresources(ui, repo)
2463 tres = formatter.templateresources(ui, repo)
2464 t = formatter.maketemplater(ui, tmpl, resources=tres)
2464 t = formatter.maketemplater(ui, tmpl, resources=tres)
2465 ui.write(t.renderdefault(props))
2465 ui.write(t.renderdefault(props))
2466 else:
2466 else:
2467 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2467 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2468 for r in revs:
2468 for r in revs:
2469 displayer.show(repo[r], **pycompat.strkwargs(props))
2469 displayer.show(repo[r], **pycompat.strkwargs(props))
2470 displayer.close()
2470 displayer.close()
2471
2471
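# Editorial illustration (hypothetical session; the second output line is a
# placeholder for whatever the current revision happens to be):
#
#   $ hg debugtemplate -D word=hello '{word} world\n'
#   hello world
#
#   $ hg debugtemplate -r . '{rev}:{node|short}\n'
#   42:0123456789ab
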
2472 @command('debuguigetpass', [
2472 @command('debuguigetpass', [
2473 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2473 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2474 ], _('[-p TEXT]'), norepo=True)
2474 ], _('[-p TEXT]'), norepo=True)
2475 def debuguigetpass(ui, prompt=''):
2475 def debuguigetpass(ui, prompt=''):
2476 """show prompt to type password"""
2476 """show prompt to type password"""
2477 r = ui.getpass(prompt)
2477 r = ui.getpass(prompt)
2478 ui.write(('response: %s\n') % r)
2478 ui.write(('response: %s\n') % r)
2479
2479
2480 @command('debuguiprompt', [
2480 @command('debuguiprompt', [
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2482 ], _('[-p TEXT]'), norepo=True)
2482 ], _('[-p TEXT]'), norepo=True)
2483 def debuguiprompt(ui, prompt=''):
2483 def debuguiprompt(ui, prompt=''):
2484 """show plain prompt"""
2484 """show plain prompt"""
2485 r = ui.prompt(prompt)
2485 r = ui.prompt(prompt)
2486 ui.write(('response: %s\n') % r)
2486 ui.write(('response: %s\n') % r)
2487
2487
2488 @command('debugupdatecaches', [])
2488 @command('debugupdatecaches', [])
2489 def debugupdatecaches(ui, repo, *pats, **opts):
2489 def debugupdatecaches(ui, repo, *pats, **opts):
2490 """warm all known caches in the repository"""
2490 """warm all known caches in the repository"""
2491 with repo.wlock(), repo.lock():
2491 with repo.wlock(), repo.lock():
2492 repo.updatecaches(full=True)
2492 repo.updatecaches(full=True)
2493
2493
2494 @command('debugupgraderepo', [
2494 @command('debugupgraderepo', [
2495 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2495 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2496 ('', 'run', False, _('performs an upgrade')),
2496 ('', 'run', False, _('performs an upgrade')),
2497 ])
2497 ])
2498 def debugupgraderepo(ui, repo, run=False, optimize=None):
2498 def debugupgraderepo(ui, repo, run=False, optimize=None):
2499 """upgrade a repository to use different features
2499 """upgrade a repository to use different features
2500
2500
2501 If no arguments are specified, the repository is evaluated for upgrade
2501 If no arguments are specified, the repository is evaluated for upgrade
2502 and a list of problems and potential optimizations is printed.
2502 and a list of problems and potential optimizations is printed.
2503
2503
2504 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2504 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2505 can be influenced via additional arguments. More details will be provided
2505 can be influenced via additional arguments. More details will be provided
2506 by the command output when run without ``--run``.
2506 by the command output when run without ``--run``.
2507
2507
2508 During the upgrade, the repository will be locked and no writes will be
2508 During the upgrade, the repository will be locked and no writes will be
2509 allowed.
2509 allowed.
2510
2510
2511 At the end of the upgrade, the repository may not be readable while new
2511 At the end of the upgrade, the repository may not be readable while new
2512 repository data is swapped in. This window will be as long as it takes to
2512 repository data is swapped in. This window will be as long as it takes to
2513 rename some directories inside the ``.hg`` directory. On most machines, this
2513 rename some directories inside the ``.hg`` directory. On most machines, this
2514 should complete almost instantaneously and the chances of a consumer being
2514 should complete almost instantaneously and the chances of a consumer being
2515 unable to access the repository should be low.
2515 unable to access the repository should be low.
2516 """
2516 """
2517 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2517 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2518
2518
2519 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2519 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2520 inferrepo=True)
2520 inferrepo=True)
2521 def debugwalk(ui, repo, *pats, **opts):
2521 def debugwalk(ui, repo, *pats, **opts):
2522 """show how files match on given patterns"""
2522 """show how files match on given patterns"""
2523 opts = pycompat.byteskwargs(opts)
2523 opts = pycompat.byteskwargs(opts)
2524 m = scmutil.match(repo[None], pats, opts)
2524 m = scmutil.match(repo[None], pats, opts)
2525 ui.write(('matcher: %r\n' % m))
2525 ui.write(('matcher: %r\n' % m))
2526 items = list(repo[None].walk(m))
2526 items = list(repo[None].walk(m))
2527 if not items:
2527 if not items:
2528 return
2528 return
2529 f = lambda fn: fn
2529 f = lambda fn: fn
2530 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2530 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2531 f = lambda fn: util.normpath(fn)
2531 f = lambda fn: util.normpath(fn)
2532 fmt = 'f %%-%ds %%-%ds %%s' % (
2532 fmt = 'f %%-%ds %%-%ds %%s' % (
2533 max([len(abs) for abs in items]),
2533 max([len(abs) for abs in items]),
2534 max([len(m.rel(abs)) for abs in items]))
2534 max([len(m.rel(abs)) for abs in items]))
2535 for abs in items:
2535 for abs in items:
2536 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2536 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2537 ui.write("%s\n" % line.rstrip())
2537 ui.write("%s\n" % line.rstrip())
2538
2538
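# Editorial illustration (hypothetical session; the matcher repr is abridged
# and the file names are placeholders):
#
#   $ hg debugwalk setup.py 'glob:docs/*.txt'
#   matcher: <matcher ...>
#   f  setup.py        setup.py        exact
#   f  docs/notes.txt  docs/notes.txt
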
2539 @command('debugwhyunstable', [], _('REV'))
2539 @command('debugwhyunstable', [], _('REV'))
2540 def debugwhyunstable(ui, repo, rev):
2540 def debugwhyunstable(ui, repo, rev):
2541 """explain instabilities of a changeset"""
2541 """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, repo[rev]):
        dnodes = ''
        if entry.get('divergentnodes'):
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
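    """round-trip arguments through the wire protocol of the given peer

    The peer's ``debugwireargs`` command is invoked twice with the same
    arguments; a warning is emitted if the second response differs from the
    first, which would indicate the stream was left in a bad state.
    """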
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

def _parsewirelangblocks(fh):
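    """Parse the block-based mini language used by ``debugwireproto``.

    Yields ``(action, blocklines)`` tuples, where ``action`` is the
    unindented line opening a block and ``blocklines`` is the list of
    indented lines (blank lines and ``#`` comments skipped) following it.
    """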
    activeaction = None
    blocklines = []

    for line in fh:
        line = line.rstrip()
        if not line:
            continue

        if line.startswith(b'#'):
            continue

        if not line.startswith(' '):
            # New block. Flush previous one.
            if activeaction:
                yield activeaction, blocklines

            activeaction = line
            blocklines = []
            continue

        # Else we start with an indent.

        if not activeaction:
            raise error.Abort(_('indented line outside of block'))

        blocklines.append(line)

    # Flush last block.
    if activeaction:
        yield activeaction, blocklines

@command('debugwireproto',
    [
        ('', 'localssh', False, _('start an SSH server for this repo')),
        ('', 'peer', '', _('construct a specific version of the peer')),
        ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
    ] + cmdutil.remoteopts,
    _('[PATH]'),
    optionalrepo=True)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
    payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.
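
    For example (illustrative only; the payload depends on what is being
    tested), the following sends a version 1 SSH ``hello`` request, with the
    trailing newline spelled as an escape sequence::

      raw
          hello\\n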

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

      command listkeys
          namespace bookmarks

    Values are interpreted as Python b'' literals. This allows encoding
    special byte sequences via backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.
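
    For example (illustrative; the file name and argument values here are
    hypothetical), a pre-generated bundle file could be submitted via the
    ``unbundle`` command like this (``666f726365`` is ``force`` in hex)::

      command unbundle
          PUSHFILE ../initial.v1.hg
          heads 666f726365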

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.
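
    For example (illustrative), two commands can be queued and submitted as
    one batch like this::

      batchbegin
      command heads
      command listkeys
          namespace bookmarks
      batchsubmit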

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.
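
    For example (illustrative; the query string and header value are
    placeholders), a GET request with a custom header could be issued as::

      httprequest GET ?cmd=capabilities
          user-agent: test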

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.
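
    For example (illustrative), a complete low-level session against an SSH
    server could send a raw ``hello`` and then consume the response::

      raw
          hello\\n
      readavailable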

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` ``<X>`` bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit *Unified Frame-Based Protocol* frames by using
    special syntax.

    A frame is composed of a type, flags, and payload. These can be parsed
    from a string of the form ``<requestid> <type> <flags> <payload>``. That
    is, 4 space-delimited strings.

    ``payload`` is the simplest: it is evaluated as a Python byte string
    literal.

    ``requestid`` is an integer defining the request identifier.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``flags`` is a ``|`` delimited list of flag components. Each component
    (and there can be just one) can be an integer or a flag name for the
    specified frame type. Values are resolved to integers and then bitwise
    OR'd together.
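
    For example (illustrative), a frame with request id 1, numeric type 1,
    flags 0 and payload ``ping`` would be written, as an indented line under
    an ``httprequest`` action, as::

      frame 1 1 0 ping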
2788 """
2788 """
2789 opts = pycompat.byteskwargs(opts)
2789 opts = pycompat.byteskwargs(opts)
2790
2790
2791 if opts['localssh'] and not repo:
2791 if opts['localssh'] and not repo:
2792 raise error.Abort(_('--localssh requires a repository'))
2792 raise error.Abort(_('--localssh requires a repository'))
2793
2793
2794 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2794 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2795 raise error.Abort(_('invalid value for --peer'),
2795 raise error.Abort(_('invalid value for --peer'),
2796 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2796 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2797
2797
2798 if path and opts['localssh']:
2798 if path and opts['localssh']:
2799 raise error.Abort(_('cannot specify --localssh with an explicit '
2799 raise error.Abort(_('cannot specify --localssh with an explicit '
2800 'path'))
2800 'path'))
2801
2801
2802 if ui.interactive():
2802 if ui.interactive():
2803 ui.write(_('(waiting for commands on stdin)\n'))
2803 ui.write(_('(waiting for commands on stdin)\n'))
2804
2804
2805 blocks = list(_parsewirelangblocks(ui.fin))
2805 blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts['localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            '-R', repo.root,
            'debugserve', '--sshstdio',
        ]
        proc = subprocess.Popen(args, stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                bufsize=0)

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts['peer'] == 'raw':
            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
                                               logdata=True)
            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
                                                logdata=True)
            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
                                                logdata=True)

        # --localssh also implies the peer connection settings.

        url = 'ssh://localserver'
        autoreadstderr = not opts['noreadstderr']

        if opts['peer'] == 'ssh1':
            ui.write(_('creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'ssh2':
            ui.write(_('creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
                                     None, autoreadstderr=autoreadstderr)
        elif opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_('creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
                                    autoreadstderr=autoreadstderr)

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != 'http':
            raise error.Abort(_('only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {}

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs = {
                r'loggingfh': ui,
                r'loggingname': b's',
                r'loggingopts': {
                    r'logdata': True,
                    r'logdataapis': False,
                },
            }

            if ui.debugflag:
                openerargs[r'loggingopts'][r'logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts['peer'] == 'raw':
            openerargs[r'sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts['peer'] == 'raw':
            ui.write(_('using raw connection to peer\n'))
            peer = None
        elif opts['peer']:
            raise error.Abort(_('--peer %s not supported with HTTP peers') %
                              opts['peer'])
        else:
            peer = httppeer.httppeer(ui, path, url, opener)
            peer._fetchcaps()

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_('unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in ('raw', 'raw+'):
            if not stdin:
                raise error.Abort(_('cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = ''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == 'raw+':
                stdin.flush()
        elif action == 'flush':
            if not stdin:
                raise error.Abort(_('cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith('command'):
            if not peer:
                raise error.Abort(_('cannot send commands unless peer instance '
                                    'is available'))

            command = action.split(' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = ''
                else:
                    key, value = fields

                args[key] = stringutil.unescapestr(value)

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_('sending %s command\n') % command)

            if 'PUSHFILE' in args:
                with open(args['PUSHFILE'], r'rb') as fh:
                    del args['PUSHFILE']
                    res, output = peer._callpush(command, fh,
                                                 **pycompat.strkwargs(args))
                    ui.status(_('result: %s\n') % stringutil.escapedata(res))
                    ui.status(_('remote output: %s\n') %
                              stringutil.escapedata(output))
            else:
                res = peer._call(command, **pycompat.strkwargs(args))
                ui.status(_('response: %s\n') % stringutil.escapedata(res))

        elif action == 'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_('nested batchbegin not allowed'))

            batchedcommands = []
        elif action == 'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(_('sending batch with %d sub-commands\n') %
                      len(batchedcommands))
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(_('response #%d: %s\n') %
                          (i, stringutil.escapedata(chunk)))

            batchedcommands = None

        elif action.startswith('httprequest '):
            if not opener:
                raise error.Abort(_('cannot use httprequest without an HTTP '
                                    'peer'))

            request = action.split(' ', 2)
            if len(request) != 3:
                raise error.Abort(_('invalid httprequest: expected format is '
                                    '"httprequest <method> <path>"'))

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    headers[m.group(1)] = m.group(2)
                    continue

                if line.startswith(b'BODYFILE '):
                    with open(line.split(b' ', 1)[1], 'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame '):])

                    frames.append(frame)
                else:
                    raise error.Abort(_('unknown argument to httprequest: %s') %
                                      line)

            url = path + httppath

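            # Frames, when present, take precedence: their concatenation
            # replaces any body read from BODYFILE.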
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: method

            try:
                opener.open(req).read()
            except util.urlerr.urlerror as e:
                e.read()

        elif action == 'close':
            peer.close()
        elif action == 'readavailable':
            if not stdout or not stderr:
                raise error.Abort(_('readavailable not available on this peer'))

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == 'readline':
            if not stdout:
                raise error.Abort(_('readline not available on this peer'))
            stdout.readline()
        elif action == 'ereadline':
            if not stderr:
                raise error.Abort(_('ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith('read '):
            count = int(action.split(' ', 1)[1])
            if not stdout:
                raise error.Abort(_('read not available on this peer'))
            stdout.read(count)
        elif action.startswith('eread '):
            count = int(action.split(' ', 1)[1])
            if not stderr:
                raise error.Abort(_('eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_('unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_('unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()